1 // SGI's rope class -*- C++ -*-
2
3 // Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 2, or (at your option)
9 // any later version.
10
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15
16 // You should have received a copy of the GNU General Public License along
17 // with this library; see the file COPYING. If not, write to the Free
18 // Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
19 // USA.
20
21 // As a special exception, you may use this file as part of a free software
22 // library without restriction. Specifically, if other files instantiate
23 // templates or use macros or inline functions from this file, or you compile
24 // this file and link it with other files to produce an executable, this
25 // file does not by itself cause the resulting executable to be covered by
26 // the GNU General Public License. This exception does not however
27 // invalidate any other reasons why the executable file might be covered by
28 // the GNU General Public License.
29
30 /*
31 * Copyright (c) 1997
32 * Silicon Graphics Computer Systems, Inc.
33 *
34 * Permission to use, copy, modify, distribute and sell this software
35 * and its documentation for any purpose is hereby granted without fee,
36 * provided that the above copyright notice appear in all copies and
37 * that both that copyright notice and this permission notice appear
38 * in supporting documentation. Silicon Graphics makes no
39 * representations about the suitability of this software for any
40 * purpose. It is provided "as is" without express or implied warranty.
41 */
42
43 /** @file ext/rope
44 * This file is a GNU extension to the Standard C++ Library (possibly
45 * containing extensions from the HP/SGI STL subset). You should only
46 * include this header if you are using GCC 3 or later.
47 */
48
49 #ifndef _ROPE
50 #define _ROPE 1
51
52 #include <bits/stl_algobase.h>
53 #include <bits/stl_construct.h>
54 #include <bits/stl_uninitialized.h>
55 #include <bits/stl_algo.h>
56 #include <bits/stl_function.h>
57 #include <bits/stl_numeric.h>
58 #include <bits/allocator.h>
59 #include <ext/hash_fun.h>
60
61 # ifdef __GC
62 # define __GC_CONST const
63 # else
64 # include <bits/gthr.h>
65 # define __GC_CONST // constant except for deallocation
66 # endif
67
68 #include <ext/memory> // For uninitialized_copy_n
69
70 namespace __gnu_cxx
71 {
72 using std::size_t;
73 using std::ptrdiff_t;
74 using std::allocator;
75 using std::iterator;
76 using std::reverse_iterator;
77 using std::_Destroy;
78
79 // The _S_eos function is used for those functions that
80 // convert to/from C-like strings to detect the end of the string.
81
82 // The end-of-C-string character.
83 // This is what the draft standard says it should be.
84 template <class _CharT>
85 inline _CharT _S_eos(_CharT*) { return _CharT(); }
86
87 // Test for basic character types.
 88 // For basic character types, leaves have a trailing eos.
89 template <class _CharT>
90 inline bool _S_is_basic_char_type(_CharT*) { return false; }
91 template <class _CharT>
92 inline bool _S_is_one_byte_char_type(_CharT*) { return false; }
93
94 inline bool _S_is_basic_char_type(char*) { return true; }
95 inline bool _S_is_one_byte_char_type(char*) { return true; }
96 inline bool _S_is_basic_char_type(wchar_t*) { return true; }
97
98 // Store an eos iff _CharT is a basic character type.
99 // Do not reference _S_eos if it isn't.
100 template <class _CharT>
101 inline void _S_cond_store_eos(_CharT&) {}
102
103 inline void _S_cond_store_eos(char& __c) { __c = 0; }
104 inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
105
106 // char_producers are logically functions that generate a section of
107 // a string.  These can be converted to ropes.  The resulting rope
108 // invokes the char_producer on demand. This allows, for example,
109 // files to be viewed as ropes without reading the entire file.
110 template <class _CharT>
111 class char_producer {
112 public:
113 virtual ~char_producer() {};
114 virtual void operator()(size_t __start_pos, size_t __len,
115 _CharT* __buffer) = 0;
116 // Buffer should really be an arbitrary output iterator.
117 // That way we could flatten directly into an ostream, etc.
118 // This is thoroughly impossible, since iterator types don't
119 // have runtime descriptions.
120 };
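// Illustrative sketch (not part of the original header): a minimal
// char_producer and a rope built from it.  The producer is only invoked
// when the corresponding characters are actually needed.  The name
// my_zero_producer is hypothetical; the constructor used is the
// (char_producer*, length, delete_when_done) one defined further down.
//
//   struct my_zero_producer : public __gnu_cxx::char_producer<char>
//   {
//     virtual void
//     operator()(size_t /* __start_pos */, size_t __len, char* __buffer)
//     { for (size_t __i = 0; __i < __len; ++__i) __buffer[__i] = '0'; }
//   };
//
//   my_zero_producer __prod;
//   __gnu_cxx::rope<char> __lazy(&__prod, 1000000, false); // a million '0's, none generated yet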
121
122 // Sequence buffers:
123 //
124 // Sequence must provide an append operation that appends an
125 // array to the sequence. Sequence buffers are useful only if
126 // appending an entire array is cheaper than appending element by element.
127 // This is true for many string representations.
128 // This should perhaps inherit from ostream<sequence::value_type>
129 // and be implemented correspondingly, so that they can be used
130 // for formatted output.  For the sake of portability, we don't do this yet.
131 //
132 // For now, sequence buffers behave as output iterators. But they also
133 // behave a little like basic_ostringstream<sequence::value_type> and a
134 // little like containers.
135
136 template<class _Sequence, size_t _Buf_sz = 100>
137 class sequence_buffer : public iterator<std::output_iterator_tag,void,void,void,void>
138 {
139 public:
140 typedef typename _Sequence::value_type value_type;
141 protected:
142 _Sequence* _M_prefix;
143 value_type _M_buffer[_Buf_sz];
144 size_t _M_buf_count;
145 public:
146 void flush() {
147 _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
148 _M_buf_count = 0;
149 }
150 ~sequence_buffer() { flush(); }
151 sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
152 sequence_buffer(const sequence_buffer& __x) {
153 _M_prefix = __x._M_prefix;
154 _M_buf_count = __x._M_buf_count;
155 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
156 }
157 sequence_buffer(sequence_buffer& __x) {
158 __x.flush();
159 _M_prefix = __x._M_prefix;
160 _M_buf_count = 0;
161 }
162 sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
163 sequence_buffer& operator= (sequence_buffer& __x) {
164 __x.flush();
165 _M_prefix = __x._M_prefix;
166 _M_buf_count = 0;
167 return *this;
168 }
169 sequence_buffer& operator= (const sequence_buffer& __x) {
170 _M_prefix = __x._M_prefix;
171 _M_buf_count = __x._M_buf_count;
172 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
173 return *this;
174 }
175 void push_back(value_type __x)
176 {
177 if (_M_buf_count < _Buf_sz) {
178 _M_buffer[_M_buf_count] = __x;
179 ++_M_buf_count;
180 } else {
181 flush();
182 _M_buffer[0] = __x;
183 _M_buf_count = 1;
184 }
185 }
186 void append(value_type* __s, size_t __len)
187 {
188 if (__len + _M_buf_count <= _Buf_sz) {
189 size_t __i = _M_buf_count;
190 for (size_t __j = 0; __j < __len; __i++, __j++) {
191 _M_buffer[__i] = __s[__j];
192 }
193 _M_buf_count += __len;
194 } else if (0 == _M_buf_count) {
195 _M_prefix->append(__s, __s + __len);
196 } else {
197 flush();
198 append(__s, __len);
199 }
200 }
201 sequence_buffer& write(value_type* __s, size_t __len)
202 {
203 append(__s, __len);
204 return *this;
205 }
206 sequence_buffer& put(value_type __x)
207 {
208 push_back(__x);
209 return *this;
210 }
211 sequence_buffer& operator=(const value_type& __rhs)
212 {
213 push_back(__rhs);
214 return *this;
215 }
216 sequence_buffer& operator*() { return *this; }
217 sequence_buffer& operator++() { return *this; }
218 sequence_buffer& operator++(int) { return *this; }
219 };
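// Illustrative sketch: batching single-character appends to a rope
// through a sequence_buffer, so the rope sees one block append per
// _Buf_sz (here 100) characters instead of one concatenation per
// character.  Relies on the rope's two-pointer append, provided later
// in this header.
//
//   __gnu_cxx::rope<char> __r;
//   __gnu_cxx::sequence_buffer<__gnu_cxx::rope<char> > __buf(__r);
//   for (char __c = 'a'; __c <= 'z'; ++__c)
//     __buf.push_back(__c);       // buffered; cheap
//   __buf.flush();                // push any remaining characters into __r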
220
221 // The following should be treated as private, at least for now.
222 template<class _CharT>
223 class _Rope_char_consumer {
224 public:
225 // If we had member templates, these should not be virtual.
226 // For now we need to use run-time parametrization where
227 // compile-time would do. Hence this should all be private
228 // for now.
229 // The symmetry with char_producer is accidental and temporary.
230 virtual ~_Rope_char_consumer() {};
231 virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
232 };
233
234 // First a lot of forward declarations. The standard seems to require
235 // much stricter "declaration before use" than many of the implementations
236 // that preceded it.
237 template<class _CharT, class _Alloc = allocator<_CharT> > class rope;
238 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
239 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
240 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
241 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
242 template<class _CharT, class _Alloc> class _Rope_iterator;
243 template<class _CharT, class _Alloc> class _Rope_const_iterator;
244 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
245 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
246
247 template<class _CharT, class _Alloc>
248 bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
249 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y);
250
251 template<class _CharT, class _Alloc>
252 _Rope_const_iterator<_CharT,_Alloc> operator-
253 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
254 ptrdiff_t __n);
255
256 template<class _CharT, class _Alloc>
257 _Rope_const_iterator<_CharT,_Alloc> operator+
258 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
259 ptrdiff_t __n);
260
261 template<class _CharT, class _Alloc>
262 _Rope_const_iterator<_CharT,_Alloc> operator+
263 (ptrdiff_t __n,
264 const _Rope_const_iterator<_CharT,_Alloc>& __x);
265
266 template<class _CharT, class _Alloc>
267 bool operator==
268 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
269 const _Rope_const_iterator<_CharT,_Alloc>& __y);
270
271 template<class _CharT, class _Alloc>
272 bool operator<
273 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
274 const _Rope_const_iterator<_CharT,_Alloc>& __y);
275
276 template<class _CharT, class _Alloc>
277 ptrdiff_t operator-
278 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
279 const _Rope_const_iterator<_CharT,_Alloc>& __y);
280
281 template<class _CharT, class _Alloc>
282 _Rope_iterator<_CharT,_Alloc> operator-
283 (const _Rope_iterator<_CharT,_Alloc>& __x,
284 ptrdiff_t __n);
285
286 template<class _CharT, class _Alloc>
287 _Rope_iterator<_CharT,_Alloc> operator+
288 (const _Rope_iterator<_CharT,_Alloc>& __x,
289 ptrdiff_t __n);
290
291 template<class _CharT, class _Alloc>
292 _Rope_iterator<_CharT,_Alloc> operator+
293 (ptrdiff_t __n,
294 const _Rope_iterator<_CharT,_Alloc>& __x);
295
296 template<class _CharT, class _Alloc>
297 bool operator==
298 (const _Rope_iterator<_CharT,_Alloc>& __x,
299 const _Rope_iterator<_CharT,_Alloc>& __y);
300
301 template<class _CharT, class _Alloc>
302 bool operator<
303 (const _Rope_iterator<_CharT,_Alloc>& __x,
304 const _Rope_iterator<_CharT,_Alloc>& __y);
305
306 template<class _CharT, class _Alloc>
307 ptrdiff_t operator-
308 (const _Rope_iterator<_CharT,_Alloc>& __x,
309 const _Rope_iterator<_CharT,_Alloc>& __y);
310
311 template<class _CharT, class _Alloc>
312 rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
313 const rope<_CharT,_Alloc>& __right);
314
315 template<class _CharT, class _Alloc>
316 rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
317 const _CharT* __right);
318
319 template<class _CharT, class _Alloc>
320 rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
321 _CharT __right);
322
323 // Some helpers, so we can use power on ropes.
324 // See below for why this isn't local to the implementation.
325
326 // This uses a nonstandard refcount convention.
327 // The result has refcount 0.
328 template<class _CharT, class _Alloc>
329 struct _Rope_Concat_fn
330 : public std::binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
331 rope<_CharT,_Alloc> > {
332 rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
333 const rope<_CharT,_Alloc>& __y) {
334 return __x + __y;
335 }
336 };
337
338 template <class _CharT, class _Alloc>
339 inline
340 rope<_CharT,_Alloc>
341 identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
342 {
343 return rope<_CharT,_Alloc>();
344 }
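// Illustrative sketch, assuming the SGI power extension
// (__gnu_cxx::power, declared in <ext/numeric>): the concat functor and
// identity_element above let power build an n-fold repetition of a rope
// with O(log n) concatenations.
//
//   #include <ext/numeric>
//   __gnu_cxx::rope<char> __unit("ab");
//   __gnu_cxx::rope<char> __rep =
//     __gnu_cxx::power(__unit, 8,
//                      __gnu_cxx::_Rope_Concat_fn<char, std::allocator<char> >());
//   // __rep now holds "ab" repeated 8 times.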
345
346
347 // Class _Refcount_Base provides a type, _RC_t, a data member,
348 // _M_ref_count, and member functions _M_incr and _M_decr, which perform
349 // atomic preincrement/predecrement. The constructor initializes
350 // _M_ref_count.
351 struct _Refcount_Base
352 {
353 // The type _RC_t
354 typedef size_t _RC_t;
355
356 // The data member _M_ref_count
357 volatile _RC_t _M_ref_count;
358
359 // Constructor
360 __gthread_mutex_t _M_ref_count_lock;
361
362 _Refcount_Base(_RC_t __n) : _M_ref_count(__n)
363 {
364 #ifdef __GTHREAD_MUTEX_INIT
365 __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
366 _M_ref_count_lock = __tmp;
367 #elif defined(__GTHREAD_MUTEX_INIT_FUNCTION)
368 __GTHREAD_MUTEX_INIT_FUNCTION (&_M_ref_count_lock);
369 #else
370 #error __GTHREAD_MUTEX_INIT or __GTHREAD_MUTEX_INIT_FUNCTION should be defined by gthr.h abstraction layer, report problem to libstdc++@gcc.gnu.org.
371 #endif
372 }
373
374 void
375 _M_incr()
376 {
377 __gthread_mutex_lock(&_M_ref_count_lock);
378 ++_M_ref_count;
379 __gthread_mutex_unlock(&_M_ref_count_lock);
380 }
381
382 _RC_t
383 _M_decr()
384 {
385 __gthread_mutex_lock(&_M_ref_count_lock);
386 volatile _RC_t __tmp = --_M_ref_count;
387 __gthread_mutex_unlock(&_M_ref_count_lock);
388 return __tmp;
389 }
390 };
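// Illustrative sketch of how _Refcount_Base is meant to be used by the
// rope node classes below: a node starts life with one reference,
// sharing it calls _M_incr, and the owner that sees _M_decr return zero
// frees the node (compare _Rope_RopeRep::_M_ref_nonnil and
// _M_unref_nonnil further down).  The class name is hypothetical.
//
//   struct my_counted_node : public __gnu_cxx::_Refcount_Base
//   {
//     my_counted_node() : _Refcount_Base(1) {}   // one reference at creation
//   };
//
//   // share:    __node->_M_incr();
//   // release:  if (0 == __node->_M_decr()) { /* last reference: destroy */ }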
391
392 //
393 // What follows should really be local to rope. Unfortunately,
394 // that doesn't work, since it makes it impossible to define generic
395 // equality on rope iterators. According to the draft standard, the
396 // template parameters for such an equality operator cannot be inferred
397 // from the occurrence of a member class as a parameter.
398 // (SGI compilers in fact allow this, but the result wouldn't be
399 // portable.)
400 // Similarly, some of the static member functions are member functions
401 // only to avoid polluting the global namespace, and to circumvent
402 // restrictions on type inference for template functions.
403 //
404
405 //
406 // The internal data structure for representing a rope. This is
407 // private to the implementation. A rope is really just a pointer
408 // to one of these.
409 //
410 // A few basic functions for manipulating this data structure
411 // are members of _RopeRep. Most of the more complex algorithms
412 // are implemented as rope members.
413 //
414 // Some of the static member functions of _RopeRep have identically
415 // named functions in rope that simply invoke the _RopeRep versions.
416
417 #define __ROPE_DEFINE_ALLOCS(__a) \
418 __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
419 typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
420 __ROPE_DEFINE_ALLOC(__C,_C) \
421 typedef _Rope_RopeLeaf<_CharT,__a> __L; \
422 __ROPE_DEFINE_ALLOC(__L,_L) \
423 typedef _Rope_RopeFunction<_CharT,__a> __F; \
424 __ROPE_DEFINE_ALLOC(__F,_F) \
425 typedef _Rope_RopeSubstring<_CharT,__a> __S; \
426 __ROPE_DEFINE_ALLOC(__S,_S)
427
428 // Internal rope nodes potentially store a copy of the allocator
429 // instance used to allocate them. This is mostly redundant.
430 // But the alternative would be to pass allocator instances around
431 // in some form to nearly all internal functions, since any pointer
432 // assignment may result in a zero reference count and thus require
433 // deallocation.
434
435 #define __STATIC_IF_SGI_ALLOC /* not static */
436
437 template <class _CharT, class _Alloc>
438 struct _Rope_rep_base
439 : public _Alloc
440 {
441 typedef _Alloc allocator_type;
442
443 allocator_type
444 get_allocator() const { return *static_cast<const _Alloc*>(this); }
445
446 _Rope_rep_base(size_t __size, const allocator_type&)
447 : _M_size(__size) {}
448
449 size_t _M_size;
450
451 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \
452 typedef typename \
453 _Alloc::template rebind<_Tp>::other __name##Alloc; \
454 static _Tp* __name##_allocate(size_t __n) \
455 { return __name##Alloc().allocate(__n); } \
456 static void __name##_deallocate(_Tp *__p, size_t __n) \
457 { __name##Alloc().deallocate(__p, __n); }
458 __ROPE_DEFINE_ALLOCS(_Alloc)
459 # undef __ROPE_DEFINE_ALLOC
460 };
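// Illustrative sketch (an expansion, not new code): for the
// character-data case, the __ROPE_DEFINE_ALLOC / __ROPE_DEFINE_ALLOCS
// macros give _Rope_rep_base members equivalent to
//
//   typedef typename _Alloc::template rebind<_CharT>::other _DataAlloc;
//   static _CharT* _Data_allocate(size_t __n)
//   { return _DataAlloc().allocate(__n); }
//   static void _Data_deallocate(_CharT* __p, size_t __n)
//   { _DataAlloc().deallocate(__p, __n); }
//
// and likewise _C/_L/_F/_S allocators for the four node types.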
461
462 namespace _Rope_constants
463 {
464 enum { _S_max_rope_depth = 45 };
465 enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
466 }
467
468 template<class _CharT, class _Alloc>
469 struct _Rope_RopeRep : public _Rope_rep_base<_CharT,_Alloc>
470 # ifndef __GC
471 , _Refcount_Base
472 # endif
473 {
474 public:
475 _Rope_constants::_Tag _M_tag:8;
476 bool _M_is_balanced:8;
477 unsigned char _M_depth;
478 __GC_CONST _CharT* _M_c_string;
479 __gthread_mutex_t _M_c_string_lock;
480 /* Flattened version of string, if needed. */
481 /* typically 0. */
482 /* If it's not 0, then the memory is owned */
483 /* by this node. */
484 /* In the case of a leaf, this may point to */
485 /* the same memory as the data field. */
486 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
487 allocator_type;
488 using _Rope_rep_base<_CharT,_Alloc>::get_allocator;
489 _Rope_RopeRep(_Rope_constants::_Tag __t, int __d, bool __b, size_t __size,
490 allocator_type __a)
491 : _Rope_rep_base<_CharT,_Alloc>(__size, __a),
492 # ifndef __GC
493 _Refcount_Base(1),
494 # endif
495 _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0)
496 #ifdef __GTHREAD_MUTEX_INIT
497 {
498 // Do not copy a POSIX/gthr mutex once in use. However, bits are bits.
499 __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
500 _M_c_string_lock = __tmp;
501 }
502 #else
503 { __GTHREAD_MUTEX_INIT_FUNCTION (&_M_c_string_lock); }
504 #endif
505 # ifdef __GC
506 void _M_incr () {}
507 # endif
508 static void _S_free_string(__GC_CONST _CharT*, size_t __len,
509 allocator_type __a);
510 # define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
511 // Deallocate data section of a leaf.
512 // This shouldn't be a member function.
513 // But it's hard to do anything else at the
514 // moment, because it's templatized w.r.t.
515 // an allocator.
516 // Does nothing if __GC is defined.
517 # ifndef __GC
518 void _M_free_c_string();
519 void _M_free_tree();
520 // Deallocate t. Assumes t is not 0.
521 void _M_unref_nonnil()
522 {
523 if (0 == _M_decr()) _M_free_tree();
524 }
525 void _M_ref_nonnil()
526 {
527 _M_incr();
528 }
529 static void _S_unref(_Rope_RopeRep* __t)
530 {
531 if (0 != __t) {
532 __t->_M_unref_nonnil();
533 }
534 }
535 static void _S_ref(_Rope_RopeRep* __t)
536 {
537 if (0 != __t) __t->_M_incr();
538 }
539 static void _S_free_if_unref(_Rope_RopeRep* __t)
540 {
541 if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
542 }
543 # else /* __GC */
544 void _M_unref_nonnil() {}
545 void _M_ref_nonnil() {}
546 static void _S_unref(_Rope_RopeRep*) {}
547 static void _S_ref(_Rope_RopeRep*) {}
548 static void _S_free_if_unref(_Rope_RopeRep*) {}
549 # endif
550
551 };
552
553 template<class _CharT, class _Alloc>
554 struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
555 public:
556 // Apparently needed by VC++
557 // The data fields of leaves are allocated with some
558 // extra space, to accommodate future growth and for basic
559 // character types, to hold a trailing eos character.
560 enum { _S_alloc_granularity = 8 };
561 static size_t _S_rounded_up_size(size_t __n) {
562 size_t __size_with_eos;
563
564 if (_S_is_basic_char_type((_CharT*)0)) {
565 __size_with_eos = __n + 1;
566 } else {
567 __size_with_eos = __n;
568 }
569 # ifdef __GC
570 return __size_with_eos;
571 # else
572 // Allow slop for in-place expansion.
573 return (__size_with_eos + _S_alloc_granularity-1)
574 &~ (_S_alloc_granularity-1);
575 # endif
576 }
577 __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
578 /* The allocated size is */
579 /* _S_rounded_up_size(size), except */
580 /* in the GC case, in which it */
581 /* doesn't matter. */
582 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
583 allocator_type;
584 _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
585 : _Rope_RopeRep<_CharT,_Alloc>(_Rope_constants::_S_leaf, 0, true, __size, __a), _M_data(__d)
586 {
587 if (_S_is_basic_char_type((_CharT *)0)) {
588 // already eos terminated.
589 this->_M_c_string = __d;
590 }
591 }
592 // The constructor assumes that d has been allocated with
593 // the proper allocator and the properly padded size.
594 // In contrast, the destructor deallocates the data:
595 # ifndef __GC
596 ~_Rope_RopeLeaf() {
597 if (_M_data != this->_M_c_string) {
598 this->_M_free_c_string();
599 }
600 __STL_FREE_STRING(_M_data, this->_M_size, this->get_allocator());
601 }
602 # endif
603 };
604
605 template<class _CharT, class _Alloc>
606 struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
607 public:
608 _Rope_RopeRep<_CharT,_Alloc>* _M_left;
609 _Rope_RopeRep<_CharT,_Alloc>* _M_right;
610 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
611 allocator_type;
612 _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
613 _Rope_RopeRep<_CharT,_Alloc>* __r,
614 allocator_type __a)
615
616 : _Rope_RopeRep<_CharT,_Alloc>(_Rope_constants::_S_concat,
617 std::max(__l->_M_depth, __r->_M_depth) + 1,
618 false,
619 __l->_M_size + __r->_M_size, __a),
620 _M_left(__l), _M_right(__r)
621 {}
622 # ifndef __GC
623 ~_Rope_RopeConcatenation() {
624 this->_M_free_c_string();
625 _M_left->_M_unref_nonnil();
626 _M_right->_M_unref_nonnil();
627 }
628 # endif
629 };
630
631 template<class _CharT, class _Alloc>
632 struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
633 public:
634 char_producer<_CharT>* _M_fn;
635 # ifndef __GC
636 bool _M_delete_when_done; // Char_producer is owned by the
637 // rope and should be explicitly
638 // deleted when the rope becomes
639 // inaccessible.
640 # else
641 // In the GC case, we either register the rope for
642 // finalization, or not. Thus the field is unnecessary;
643 // the information is stored in the collector data structures.
644 // We do need a finalization procedure to be invoked by the
645 // collector.
646 static void _S_fn_finalization_proc(void * __tree, void *) {
647 delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
648 }
649 # endif
650 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
651 allocator_type;
652 _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
653 bool __d, allocator_type __a)
654 : _Rope_RopeRep<_CharT,_Alloc>(_Rope_constants::_S_function,
655 0, true, __size, __a)
656 , _M_fn(__f)
657 # ifndef __GC
658 , _M_delete_when_done(__d)
659 # endif
660 {
661 # ifdef __GC
662 if (__d) {
663 GC_REGISTER_FINALIZER(
664 this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
665 }
666 # endif
667 }
668 # ifndef __GC
669 ~_Rope_RopeFunction() {
670 this->_M_free_c_string();
671 if (_M_delete_when_done) {
672 delete _M_fn;
673 }
674 }
675 # endif
676 };
677 // Substring results are usually represented using just
678 // concatenation nodes. But in the case of very long flat ropes
679 // or ropes with a functional representation that isn't practical.
680 // In that case, we represent the result as a special case of
681 // RopeFunction, whose char_producer points back to the rope itself.
682 // In all cases except repeated substring operations and
683 // deallocation, we treat the result as a RopeFunction.
684 template<class _CharT, class _Alloc>
685 struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
686 public char_producer<_CharT> {
687 public:
688 // XXX this whole class should be rewritten.
689 _Rope_RopeRep<_CharT,_Alloc>* _M_base; // not 0
690 size_t _M_start;
691 virtual void operator()(size_t __start_pos, size_t __req_len,
692 _CharT* __buffer) {
693 switch(_M_base->_M_tag) {
694 case _Rope_constants::_S_function:
695 case _Rope_constants::_S_substringfn:
696 {
697 char_producer<_CharT>* __fn =
698 ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
699 (*__fn)(__start_pos + _M_start, __req_len, __buffer);
700 }
701 break;
702 case _Rope_constants::_S_leaf:
703 {
704 __GC_CONST _CharT* __s =
705 ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
706 uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
707 __buffer);
708 }
709 break;
710 default:
711 break;
712 }
713 }
714 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
715 allocator_type;
716 _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
717 size_t __l, allocator_type __a)
718 : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
719 char_producer<_CharT>(),
720 _M_base(__b),
721 _M_start(__s)
722 {
723 # ifndef __GC
724 _M_base->_M_ref_nonnil();
725 # endif
726 this->_M_tag = _Rope_constants::_S_substringfn;
727 }
728 virtual ~_Rope_RopeSubstring()
729 {
730 # ifndef __GC
731 _M_base->_M_unref_nonnil();
732 // _M_free_c_string(); -- done by parent class
733 # endif
734 }
735 };
736
737
738 // Self-destructing pointers to Rope_rep.
739 // These are not conventional smart pointers. Their
740 // only purpose in life is to ensure that unref is called
741 // on the pointer either at normal exit or if an exception
742 // is raised. It is the caller's responsibility to
743 // adjust reference counts when these pointers are initialized
744 // or assigned to. (This convention significantly reduces
745 // the number of potentially expensive reference count
746 // updates.)
747 #ifndef __GC
748 template<class _CharT, class _Alloc>
749 struct _Rope_self_destruct_ptr {
750 _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
751 ~_Rope_self_destruct_ptr()
752 { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
753 #ifdef __EXCEPTIONS
754 _Rope_self_destruct_ptr() : _M_ptr(0) {};
755 #else
756 _Rope_self_destruct_ptr() {};
757 #endif
758 _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
759 _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
760 _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
761 operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
762 _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
763 { _M_ptr = __x; return *this; }
764 };
765 #endif
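// Illustrative sketch (names hypothetical): wrapping a freshly produced
// _RopeRep* in a _Rope_self_destruct_ptr so the reference it carries is
// released even if the code using it throws.
//
//   _Rope_self_destruct_ptr<char, std::allocator<char> >
//     __tmp(some_call_returning_a_counted_rep());   // result already counted
//   use_rep(__tmp);        // may throw; ~_Rope_self_destruct_ptr unrefs anyway
//   // no explicit cleanup needed on either path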
766
767 // Dereferencing a nonconst iterator has to return something
768 // that behaves almost like a reference. It's not possible to
769 // return an actual reference since assignment requires extra
770 // work. And we would get into the same problems as with the
771 // CD2 version of basic_string.
772 template<class _CharT, class _Alloc>
773 class _Rope_char_ref_proxy {
774 friend class rope<_CharT,_Alloc>;
775 friend class _Rope_iterator<_CharT,_Alloc>;
776 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
777 # ifdef __GC
778 typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
779 # else
780 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
781 # endif
782 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
783 typedef rope<_CharT,_Alloc> _My_rope;
784 size_t _M_pos;
785 _CharT _M_current;
786 bool _M_current_valid;
787 _My_rope* _M_root; // The whole rope.
788 public:
789 _Rope_char_ref_proxy(_My_rope* __r, size_t __p)
790 : _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
791 _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x)
792 : _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
793 // Don't preserve cache if the reference can outlive the
794 // expression. We claim that's not possible without calling
795 // a copy constructor or generating reference to a proxy
796 // reference. We declare the latter to have undefined semantics.
797 _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
798 : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
799 inline operator _CharT () const;
800 _Rope_char_ref_proxy& operator= (_CharT __c);
801 _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
802 _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
803 return operator=((_CharT)__c);
804 }
805 };
806
807 template<class _CharT, class __Alloc>
808 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
809 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
810 _CharT __tmp = __a;
811 __a = __b;
812 __b = __tmp;
813 }
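// Illustrative sketch: writing through the proxy.  Dereferencing a
// mutable rope iterator yields a _Rope_char_ref_proxy, and assigning to
// it updates the rope; this is why a plain _CharT& cannot be returned.
// mutable_begin() is assumed here; it is provided by rope later in this
// header.
//
//   __gnu_cxx::rope<char> __r("cat");
//   __gnu_cxx::rope<char>::iterator __i = __r.mutable_begin();
//   *__i = 'h';                // __r now reads "hat"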
814
815 template<class _CharT, class _Alloc>
816 class _Rope_char_ptr_proxy {
817 // XXX this class should be rewritten.
818 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
819 size_t _M_pos;
820 rope<_CharT,_Alloc>* _M_root; // The whole rope.
821 public:
822 _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
823 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
824 _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
825 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
826 _Rope_char_ptr_proxy() {}
827 _Rope_char_ptr_proxy(_CharT* __x) : _M_root(0), _M_pos(0) {
828 }
829 _Rope_char_ptr_proxy&
830 operator= (const _Rope_char_ptr_proxy& __x) {
831 _M_pos = __x._M_pos;
832 _M_root = __x._M_root;
833 return *this;
834 }
835 template<class _CharT2, class _Alloc2>
836 friend bool operator== (const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __x,
837 const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __y);
838 _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
839 return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
840 }
841 };
842
843
844 // Rope iterators:
845 // Unlike in the C version, we cache only part of the stack
846 // for rope iterators, since they must be efficiently copyable.
847 // When we run out of cache, we have to reconstruct the iterator
848 // value.
849 // Pointers from iterators are not included in reference counts.
850 // Iterators are assumed to be thread private. Ropes can
851 // be shared.
852
853 template<class _CharT, class _Alloc>
854 class _Rope_iterator_base
855 : public iterator<std::random_access_iterator_tag, _CharT>
856 {
857 friend class rope<_CharT,_Alloc>;
858 public:
859 typedef _Alloc _allocator_type; // used in _Rope_rotate, VC++ workaround
860 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
861 // Borland doesn't want this to be protected.
862 protected:
863 enum { _S_path_cache_len = 4 }; // Must be <= 9.
864 enum { _S_iterator_buf_len = 15 };
865 size_t _M_current_pos;
866 _RopeRep* _M_root; // The whole rope.
867 size_t _M_leaf_pos; // Starting position for current leaf
868 __GC_CONST _CharT* _M_buf_start;
869 // Buffer possibly
870 // containing current char.
871 __GC_CONST _CharT* _M_buf_ptr;
872 // Pointer to current char in buffer.
873 // != 0 ==> buffer valid.
874 __GC_CONST _CharT* _M_buf_end;
875 // One past __last valid char in buffer.
876 // What follows is the path cache. We go out of our
877 // way to make this compact.
878 // Path_end contains the bottom section of the path from
879 // the root to the current leaf.
880 const _RopeRep* _M_path_end[_S_path_cache_len];
881 int _M_leaf_index; // Last valid __pos in path_end;
882 // _M_path_end[0] ... _M_path_end[leaf_index-1]
883 // point to concatenation nodes.
884 unsigned char _M_path_directions;
885 // (path_directions >> __i) & 1 is 1
886 // iff we got from _M_path_end[leaf_index - __i - 1]
887 // to _M_path_end[leaf_index - __i] by going to the
888 // __right. Assumes path_cache_len <= 9.
889 _CharT _M_tmp_buf[_S_iterator_buf_len];
890 // Short buffer for surrounding chars.
891 // This is useful primarily for
892 // RopeFunctions. We put the buffer
893 // here to avoid locking in the
894 // multithreaded case.
895 // The cached path is generally assumed to be valid
896 // only if the buffer is valid.
897 static void _S_setbuf(_Rope_iterator_base& __x);
898 // Set buffer contents given
899 // path cache.
900 static void _S_setcache(_Rope_iterator_base& __x);
901 // Set buffer contents and
902 // path cache.
903 static void _S_setcache_for_incr(_Rope_iterator_base& __x);
904 // As above, but assumes path
905 // cache is valid for previous posn.
906 _Rope_iterator_base() {}
907 _Rope_iterator_base(_RopeRep* __root, size_t __pos)
908 : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) {}
909 void _M_incr(size_t __n);
910 void _M_decr(size_t __n);
911 public:
912 size_t index() const { return _M_current_pos; }
913 _Rope_iterator_base(const _Rope_iterator_base& __x) {
914 if (0 != __x._M_buf_ptr) {
915 *this = __x;
916 } else {
917 _M_current_pos = __x._M_current_pos;
918 _M_root = __x._M_root;
919 _M_buf_ptr = 0;
920 }
921 }
922 };
923
924 template<class _CharT, class _Alloc> class _Rope_iterator;
925
926 template<class _CharT, class _Alloc>
927 class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
928 friend class rope<_CharT,_Alloc>;
929 protected:
930 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
931 // The one from the base class may not be directly visible.
932 _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
933 _Rope_iterator_base<_CharT,_Alloc>(
934 const_cast<_RopeRep*>(__root), __pos)
935 // Only nonconst iterators modify root ref count
936 {}
937 public:
938 typedef _CharT reference; // Really a value. Returning a reference
939 // Would be a mess, since it would have
940 // to be included in refcount.
941 typedef const _CharT* pointer;
942
943 public:
944 _Rope_const_iterator() {};
945 _Rope_const_iterator(const _Rope_const_iterator& __x) :
946 _Rope_iterator_base<_CharT,_Alloc>(__x) { }
947 _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
948 _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
949 _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
950 _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
951 if (0 != __x._M_buf_ptr) {
952 *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
953 } else {
954 this->_M_current_pos = __x._M_current_pos;
955 this->_M_root = __x._M_root;
956 this->_M_buf_ptr = 0;
957 }
958 return(*this);
959 }
960 reference operator*() {
961 if (0 == this->_M_buf_ptr) _S_setcache(*this);
962 return *this->_M_buf_ptr;
963 }
964 _Rope_const_iterator& operator++() {
965 __GC_CONST _CharT* __next;
966 if (0 != this->_M_buf_ptr
967 && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
968 this->_M_buf_ptr = __next;
969 ++this->_M_current_pos;
970 } else {
971 this->_M_incr(1);
972 }
973 return *this;
974 }
975 _Rope_const_iterator& operator+=(ptrdiff_t __n) {
976 if (__n >= 0) {
977 this->_M_incr(__n);
978 } else {
979 this->_M_decr(-__n);
980 }
981 return *this;
982 }
983 _Rope_const_iterator& operator--() {
984 this->_M_decr(1);
985 return *this;
986 }
987 _Rope_const_iterator& operator-=(ptrdiff_t __n) {
988 if (__n >= 0) {
989 this->_M_decr(__n);
990 } else {
991 this->_M_incr(-__n);
992 }
993 return *this;
994 }
995 _Rope_const_iterator operator++(int) {
996 size_t __old_pos = this->_M_current_pos;
997 this->_M_incr(1);
998 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
999 // This makes a subsequent dereference expensive.
1000 // Perhaps we should instead copy the iterator
1001 // if it has a valid cache?
1002 }
1003 _Rope_const_iterator operator--(int) {
1004 size_t __old_pos = this->_M_current_pos;
1005 this->_M_decr(1);
1006 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
1007 }
1008 template<class _CharT2, class _Alloc2>
1009 friend _Rope_const_iterator<_CharT2,_Alloc2> operator-
1010 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1011 ptrdiff_t __n);
1012 template<class _CharT2, class _Alloc2>
1013 friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
1014 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1015 ptrdiff_t __n);
1016 template<class _CharT2, class _Alloc2>
1017 friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
1018 (ptrdiff_t __n,
1019 const _Rope_const_iterator<_CharT2,_Alloc2>& __x);
1020 reference operator[](size_t __n) {
1021 return rope<_CharT,_Alloc>::_S_fetch(this->_M_root,
1022 this->_M_current_pos + __n);
1023 }
1024
1025 template<class _CharT2, class _Alloc2>
1026 friend bool operator==
1027 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1028 const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
1029 template<class _CharT2, class _Alloc2>
1030 friend bool operator<
1031 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1032 const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
1033 template<class _CharT2, class _Alloc2>
1034 friend ptrdiff_t operator-
1035 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1036 const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
1037 };
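// Illustrative sketch: read-only traversal with _Rope_const_iterator.
// operator* returns a _CharT by value, so shared ropes can be read
// safely.  The inequality operator is assumed to be defined (in terms of
// operator==) later in this header.
//
//   #include <cstdio>
//   const __gnu_cxx::rope<char> __r("hello, world");
//   for (__gnu_cxx::rope<char>::const_iterator __it = __r.begin();
//        __it != __r.end(); ++__it)
//     std::putchar(*__it);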
1038
1039 template<class _CharT, class _Alloc>
1040 class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1041 friend class rope<_CharT,_Alloc>;
1042 protected:
1043 typedef typename _Rope_iterator_base<_CharT,_Alloc>::_RopeRep _RopeRep;
1044 rope<_CharT,_Alloc>* _M_root_rope;
1045 // root is treated as a cached version of this,
1046 // and is used to detect changes to the underlying
1047 // rope.
1048 // Root is included in the reference count.
1049 // This is necessary so that we can detect changes reliably.
1050 // Unfortunately, it requires careful bookkeeping for the
1051 // nonGC case.
1052 _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
1053 : _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr, __pos),
1054 _M_root_rope(__r)
1055 { _RopeRep::_S_ref(this->_M_root);
1056 if (!(__r -> empty()))_S_setcache(*this); }
1057
1058 void _M_check();
1059 public:
1060 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1061 typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
1062
1063 public:
1064 rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
1065 _Rope_iterator() {
1066 this->_M_root = 0; // Needed for reference counting.
1067 };
1068 _Rope_iterator(const _Rope_iterator& __x) :
1069 _Rope_iterator_base<_CharT,_Alloc>(__x) {
1070 _M_root_rope = __x._M_root_rope;
1071 _RopeRep::_S_ref(this->_M_root);
1072 }
1073 _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
1074 ~_Rope_iterator() {
1075 _RopeRep::_S_unref(this->_M_root);
1076 }
1077 _Rope_iterator& operator= (const _Rope_iterator& __x) {
1078 _RopeRep* __old = this->_M_root;
1079
1080 _RopeRep::_S_ref(__x._M_root);
1081 if (0 != __x._M_buf_ptr) {
1082 _M_root_rope = __x._M_root_rope;
1083 *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
1084 } else {
1085 this->_M_current_pos = __x._M_current_pos;
1086 this->_M_root = __x._M_root;
1087 _M_root_rope = __x._M_root_rope;
1088 this->_M_buf_ptr = 0;
1089 }
1090 _RopeRep::_S_unref(__old);
1091 return(*this);
1092 }
1093 reference operator*() {
1094 _M_check();
1095 if (0 == this->_M_buf_ptr) {
1096 return _Rope_char_ref_proxy<_CharT,_Alloc>(
1097 _M_root_rope, this->_M_current_pos);
1098 } else {
1099 return _Rope_char_ref_proxy<_CharT,_Alloc>(
1100 _M_root_rope, this->_M_current_pos, *this->_M_buf_ptr);
1101 }
1102 }
1103 _Rope_iterator& operator++() {
1104 this->_M_incr(1);
1105 return *this;
1106 }
1107 _Rope_iterator& operator+=(ptrdiff_t __n) {
1108 if (__n >= 0) {
1109 this->_M_incr(__n);
1110 } else {
1111 this->_M_decr(-__n);
1112 }
1113 return *this;
1114 }
1115 _Rope_iterator& operator--() {
1116 this->_M_decr(1);
1117 return *this;
1118 }
1119 _Rope_iterator& operator-=(ptrdiff_t __n) {
1120 if (__n >= 0) {
1121 this->_M_decr(__n);
1122 } else {
1123 this->_M_incr(-__n);
1124 }
1125 return *this;
1126 }
1127 _Rope_iterator operator++(int) {
1128 size_t __old_pos = this->_M_current_pos;
1129 this->_M_incr(1);
1130 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1131 }
1132 _Rope_iterator operator--(int) {
1133 size_t __old_pos = this->_M_current_pos;
1134 this->_M_decr(1);
1135 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1136 }
1137 reference operator[](ptrdiff_t __n) {
1138 return _Rope_char_ref_proxy<_CharT,_Alloc>(
1139 _M_root_rope, this->_M_current_pos + __n);
1140 }
1141
1142 template<class _CharT2, class _Alloc2>
1143 friend bool operator==
1144 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1145 const _Rope_iterator<_CharT2,_Alloc2>& __y);
1146 template<class _CharT2, class _Alloc2>
1147 friend bool operator<
1148 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1149 const _Rope_iterator<_CharT2,_Alloc2>& __y);
1150 template<class _CharT2, class _Alloc2>
1151 friend ptrdiff_t operator-
1152 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1153 const _Rope_iterator<_CharT2,_Alloc2>& __y);
1154 template<class _CharT2, class _Alloc2>
1155 friend _Rope_iterator<_CharT2,_Alloc2> operator-
1156 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1157 ptrdiff_t __n);
1158 template<class _CharT2, class _Alloc2>
1159 friend _Rope_iterator<_CharT2,_Alloc2> operator+
1160 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1161 ptrdiff_t __n);
1162 template<class _CharT2, class _Alloc2>
1163 friend _Rope_iterator<_CharT2,_Alloc2> operator+
1164 (ptrdiff_t __n,
1165 const _Rope_iterator<_CharT2,_Alloc2>& __x);
1166 };
1167
1168
1169 template <class _CharT, class _Alloc>
1170 struct _Rope_base
1171 : public _Alloc
1172 {
1173 typedef _Alloc allocator_type;
1174
1175 allocator_type
1176 get_allocator() const { return *static_cast<const _Alloc*>(this); }
1177
1178 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1179 // The one in _Base may not be visible due to template rules.
1180
1181 _Rope_base(_RopeRep* __t, const allocator_type&)
1182 : _M_tree_ptr(__t) {}
1183 _Rope_base(const allocator_type&) {}
1184
1185 // The only data member of a rope:
1186 _RopeRep *_M_tree_ptr;
1187
1188 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \
1189 typedef typename \
1190 _Alloc::template rebind<_Tp>::other __name##Alloc; \
1191 static _Tp* __name##_allocate(size_t __n) \
1192 { return __name##Alloc().allocate(__n); } \
1193 static void __name##_deallocate(_Tp *__p, size_t __n) \
1194 { __name##Alloc().deallocate(__p, __n); }
1195 __ROPE_DEFINE_ALLOCS(_Alloc)
1196 # undef __ROPE_DEFINE_ALLOC
1197 };
1198
1199
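// Illustrative sketch: typical use of the rope class defined below.
// Concatenation and substring are tree operations and do not copy the
// character data; single-character access costs O(log n).  substr() and
// c_str() are rope members provided later in this header.
//
//   #include <cstdio>
//   __gnu_cxx::rope<char> __a("The quick brown ");
//   __gnu_cxx::rope<char> __b("fox");
//   __gnu_cxx::rope<char> __c = __a + __b;      // shares __a's and __b's trees
//   __c.push_back('.');
//   std::printf("%s\n", __c.c_str());           // flattened on demand
//   __gnu_cxx::rope<char> __word = __c.substr(4, 5);  // "quick"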
1200 /**
1201 * This is an SGI extension.
1202 * @ingroup SGIextensions
1203 * @doctodo
1204 */
1205 template <class _CharT, class _Alloc>
1206 class rope : public _Rope_base<_CharT,_Alloc> {
1207 public:
1208 typedef _CharT value_type;
1209 typedef ptrdiff_t difference_type;
1210 typedef size_t size_type;
1211 typedef _CharT const_reference;
1212 typedef const _CharT* const_pointer;
1213 typedef _Rope_iterator<_CharT,_Alloc> iterator;
1214 typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1215 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1216 typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1217
1218 friend class _Rope_iterator<_CharT,_Alloc>;
1219 friend class _Rope_const_iterator<_CharT,_Alloc>;
1220 friend struct _Rope_RopeRep<_CharT,_Alloc>;
1221 friend class _Rope_iterator_base<_CharT,_Alloc>;
1222 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1223 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1224 friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1225
1226 protected:
1227 typedef _Rope_base<_CharT,_Alloc> _Base;
1228 typedef typename _Base::allocator_type allocator_type;
1229 using _Base::_M_tree_ptr;
1230 using _Base::get_allocator;
1231 typedef __GC_CONST _CharT* _Cstrptr;
1232
1233 static _CharT _S_empty_c_str[1];
1234
1235 static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
1236 enum { _S_copy_max = 23 };
1237 // For strings shorter than _S_copy_max, we copy to
1238 // concatenate.
1239
1240 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1241 typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1242 typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1243 typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1244 typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1245
1246 // Retrieve a character at the indicated position.
1247 static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1248
1249 # ifndef __GC
1250 // Obtain a pointer to the character at the indicated position.
1251 // The pointer can be used to change the character.
1252 // If such a pointer cannot be produced, as is frequently the
1253 // case, 0 is returned instead.
1254 // (Returns nonzero only if all nodes in the path have a refcount
1255 // of 1.)
1256 static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1257 # endif
1258
1259 static bool _S_apply_to_pieces(
1260 // should be template parameter
1261 _Rope_char_consumer<_CharT>& __c,
1262 const _RopeRep* __r,
1263 size_t __begin, size_t __end);
1264 // begin and end are assumed to be in range.
1265
1266 # ifndef __GC
1267 static void _S_unref(_RopeRep* __t)
1268 {
1269 _RopeRep::_S_unref(__t);
1270 }
1271 static void _S_ref(_RopeRep* __t)
1272 {
1273 _RopeRep::_S_ref(__t);
1274 }
1275 # else /* __GC */
1276 static void _S_unref(_RopeRep*) {}
1277 static void _S_ref(_RopeRep*) {}
1278 # endif
1279
1280
1281 # ifdef __GC
1282 typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1283 # else
1284 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1285 # endif
1286
1287 // _Result is counted in refcount.
1288 static _RopeRep* _S_substring(_RopeRep* __base,
1289 size_t __start, size_t __endp1);
1290
1291 static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1292 const _CharT* __iter, size_t __slen);
1293 // Concatenate rope and char ptr, copying __s.
1294 // Should really take an arbitrary iterator.
1295 // Result is counted in refcount.
1296 static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1297 const _CharT* __iter, size_t __slen)
1298 // As above, but one reference to __r is about to be
1299 // destroyed. Thus the pieces may be recycled if all
1300 // relevant reference counts are 1.
1301 # ifdef __GC
1302 // We can't really do anything since refcounts are unavailable.
1303 { return _S_concat_char_iter(__r, __iter, __slen); }
1304 # else
1305 ;
1306 # endif
1307
1308 static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right);
1309 // General concatenation on _RopeRep. _Result
1310 // has refcount of 1. Adjusts argument refcounts.
1311
1312 public:
1313 void apply_to_pieces( size_t __begin, size_t __end,
1314 _Rope_char_consumer<_CharT>& __c) const {
1315 _S_apply_to_pieces(__c, this->_M_tree_ptr, __begin, __end);
1316 }
1317
1318
1319 protected:
1320
1321 static size_t _S_rounded_up_size(size_t __n) {
1322 return _RopeLeaf::_S_rounded_up_size(__n);
1323 }
1324
1325 static size_t _S_allocated_capacity(size_t __n) {
1326 if (_S_is_basic_char_type((_CharT*)0)) {
1327 return _S_rounded_up_size(__n) - 1;
1328 } else {
1329 return _S_rounded_up_size(__n);
1330 }
1331 }
1332
1333 // Allocate and construct a RopeLeaf using the supplied allocator
1334 // Takes ownership of s instead of copying.
1335 static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
1336 size_t __size, allocator_type __a)
1337 {
1338 _RopeLeaf* __space = typename _Base::_LAlloc(__a).allocate(1);
1339 return new(__space) _RopeLeaf(__s, __size, __a);
1340 }
1341
1342 static _RopeConcatenation* _S_new_RopeConcatenation(
1343 _RopeRep* __left, _RopeRep* __right,
1344 allocator_type __a)
1345 {
1346 _RopeConcatenation* __space = typename _Base::_CAlloc(__a).allocate(1);
1347 return new(__space) _RopeConcatenation(__left, __right, __a);
1348 }
1349
1350 static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1351 size_t __size, bool __d, allocator_type __a)
1352 {
1353 _RopeFunction* __space = typename _Base::_FAlloc(__a).allocate(1);
1354 return new(__space) _RopeFunction(__f, __size, __d, __a);
1355 }
1356
1357 static _RopeSubstring* _S_new_RopeSubstring(
1358 _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1359 size_t __l, allocator_type __a)
1360 {
1361 _RopeSubstring* __space = typename _Base::_SAlloc(__a).allocate(1);
1362 return new(__space) _RopeSubstring(__b, __s, __l, __a);
1363 }
1364
1365 static
1366 _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1367 size_t __size, allocator_type __a)
1368 # define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
1369 _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a)
1370 {
1371 if (0 == __size) return 0;
1372 _CharT* __buf = __a.allocate(_S_rounded_up_size(__size));
1373
1374 uninitialized_copy_n(__s, __size, __buf);
1375 _S_cond_store_eos(__buf[__size]);
1376 try {
1377 return _S_new_RopeLeaf(__buf, __size, __a);
1378 }
1379 catch(...)
1380 {
1381 _RopeRep::__STL_FREE_STRING(__buf, __size, __a);
1382 __throw_exception_again;
1383 }
1384 }
1385
1386
1387 // Concatenation of nonempty strings.
1388 // Always builds a concatenation node.
1389 // Rebalances if the result is too deep.
1390 // Result has refcount 1.
1391 // Does not increment left and right ref counts even though
1392 // they are referenced.
1393 static _RopeRep*
1394 _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1395
1396 // Concatenation helper functions
1397 static _RopeLeaf*
1398 _S_leaf_concat_char_iter(_RopeLeaf* __r,
1399 const _CharT* __iter, size_t __slen);
1400 // Concatenate by copying leaf.
1401 // should take an arbitrary iterator
1402 // result has refcount 1.
1403 # ifndef __GC
1404 static _RopeLeaf* _S_destr_leaf_concat_char_iter
1405 (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1406 // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1407 # endif
1408
1409 private:
1410
1411 static size_t _S_char_ptr_len(const _CharT* __s);
1412 // slightly generalized strlen
1413
1414 rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1415 : _Base(__t,__a) { }
1416
1417
1418 // Copy __r to the _CharT buffer.
1419 // Returns __buffer + __r->_M_size.
1420 // Assumes that buffer is uninitialized.
1421 static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1422
1423 // Again, with explicit starting position and length.
1424 // Assumes that buffer is uninitialized.
1425 static _CharT* _S_flatten(_RopeRep* __r,
1426 size_t __start, size_t __len,
1427 _CharT* __buffer);
1428
1429 static const unsigned long
1430 _S_min_len[_Rope_constants::_S_max_rope_depth + 1];
1431
1432 static bool _S_is_balanced(_RopeRep* __r)
1433 { return (__r->_M_size >= _S_min_len[__r->_M_depth]); }
1434
1435 static bool _S_is_almost_balanced(_RopeRep* __r)
1436 { return (__r->_M_depth == 0 ||
1437 __r->_M_size >= _S_min_len[__r->_M_depth - 1]); }
1438
1439 static bool _S_is_roughly_balanced(_RopeRep* __r)
1440 { return (__r->_M_depth <= 1 ||
1441 __r->_M_size >= _S_min_len[__r->_M_depth - 2]); }
1442
1443 // Assumes the result is not empty.
1444 static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1445 _RopeRep* __right)
1446 {
1447 _RopeRep* __result = _S_concat(__left, __right);
1448 if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1449 return __result;
1450 }
1451
1452 // The basic rebalancing operation. Logically copies the
1453 // rope. The result has refcount of 1. The client will
1454 // usually decrement the reference count of __r.
1455 // The result is within height 2 of balanced by the above
1456 // definition.
1457 static _RopeRep* _S_balance(_RopeRep* __r);
1458
1459 // Add all unbalanced subtrees to the forest of balanced trees.
1460 // Used only by balance.
1461 static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1462
1463 // Add __r to forest, assuming __r is already balanced.
1464 static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1465
1466 // Print to stdout, exposing structure
1467 static void _S_dump(_RopeRep* __r, int __indent = 0);
1468
1469 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1470 static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1471
1472 public:
1473 bool empty() const { return 0 == this->_M_tree_ptr; }
1474
1475 // Comparison member function. This is public only for those
1476 // clients that need a ternary comparison. Others
1477 // should use the comparison operators below.
1478 int compare(const rope& __y) const {
1479 return _S_compare(this->_M_tree_ptr, __y._M_tree_ptr);
1480 }
1481
1482 rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1483 : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),
1484 __a),__a)
1485 { }
1486
1487 rope(const _CharT* __s, size_t __len,
1488 const allocator_type& __a = allocator_type())
1489 : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a), __a)
1490 { }
1491
1492 // Should perhaps be templatized with respect to the iterator type
1493 // and use Sequence_buffer. (It should perhaps use sequence_buffer
1494 // even now.)
1495 rope(const _CharT *__s, const _CharT *__e,
1496 const allocator_type& __a = allocator_type())
1497 : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a), __a)
1498 { }
1499
1500 rope(const const_iterator& __s, const const_iterator& __e,
1501 const allocator_type& __a = allocator_type())
1502 : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1503 __e._M_current_pos), __a)
1504 { }
1505
1506 rope(const iterator& __s, const iterator& __e,
1507 const allocator_type& __a = allocator_type())
1508 : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1509 __e._M_current_pos), __a)
1510 { }
1511
1512 rope(_CharT __c, const allocator_type& __a = allocator_type())
1513 : _Base(__a)
1514 {
1515 _CharT* __buf = __a.allocate(_S_rounded_up_size(1));
1516
1517 std::_Construct(__buf, __c);
1518 try {
1519 this->_M_tree_ptr = _S_new_RopeLeaf(__buf, 1, __a);
1520 }
1521 catch(...)
1522 {
1523 _RopeRep::__STL_FREE_STRING(__buf, 1, __a);
1524 __throw_exception_again;
1525 }
1526 }
1527
1528 rope(size_t __n, _CharT __c,
1529 const allocator_type& __a = allocator_type());
1530
1531 rope(const allocator_type& __a = allocator_type())
1532 : _Base(0, __a) {}
1533
1534 // Construct a rope from a function that can compute its members
1535 rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1536 const allocator_type& __a = allocator_type())
1537 : _Base(__a)
1538 {
1539 this->_M_tree_ptr = (0 == __len) ?
1540 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1541 }
1542
1543 rope(const rope& __x, const allocator_type& __a = allocator_type())
1544 : _Base(__x._M_tree_ptr, __a)
1545 {
1546 _S_ref(this->_M_tree_ptr);
1547 }
1548
1549 ~rope()
1550 {
1551 _S_unref(this->_M_tree_ptr);
1552 }
1553
1554 rope& operator=(const rope& __x)
1555 {
1556 _RopeRep* __old = this->_M_tree_ptr;
1557 this->_M_tree_ptr = __x._M_tree_ptr;
1558 _S_ref(this->_M_tree_ptr);
1559 _S_unref(__old);
1560 return(*this);
1561 }
1562
1563 void clear()
1564 {
1565 _S_unref(this->_M_tree_ptr);
1566 this->_M_tree_ptr = 0;
1567 }
1568
1569 void push_back(_CharT __x)
1570 {
1571 _RopeRep* __old = this->_M_tree_ptr;
1572 this->_M_tree_ptr
1573 = _S_destr_concat_char_iter(this->_M_tree_ptr, &__x, 1);
1574 _S_unref(__old);
1575 }
1576
1577 void pop_back()
1578 {
1579 _RopeRep* __old = this->_M_tree_ptr;
1580 this->_M_tree_ptr =
1581 _S_substring(this->_M_tree_ptr,
1582 0,
1583 this->_M_tree_ptr->_M_size - 1);
1584 _S_unref(__old);
1585 }
1586
1587 _CharT back() const
1588 {
1589 return _S_fetch(this->_M_tree_ptr, this->_M_tree_ptr->_M_size - 1);
1590 }
1591
1592 void push_front(_CharT __x)
1593 {
1594 _RopeRep* __old = this->_M_tree_ptr;
1595 _RopeRep* __left =
1596 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, this->get_allocator());
1597 try {
1598 this->_M_tree_ptr = _S_concat(__left, this->_M_tree_ptr);
1599 _S_unref(__old);
1600 _S_unref(__left);
1601 }
1602 catch(...)
1603 {
1604 _S_unref(__left);
1605 __throw_exception_again;
1606 }
1607 }
1608
1609 void pop_front()
1610 {
1611 _RopeRep* __old = this->_M_tree_ptr;
1612 this->_M_tree_ptr
1613 = _S_substring(this->_M_tree_ptr, 1, this->_M_tree_ptr->_M_size);
1614 _S_unref(__old);
1615 }
1616
1617 _CharT front() const
1618 {
1619 return _S_fetch(this->_M_tree_ptr, 0);
1620 }
1621
1622 void balance()
1623 {
1624 _RopeRep* __old = this->_M_tree_ptr;
1625 this->_M_tree_ptr = _S_balance(this->_M_tree_ptr);
1626 _S_unref(__old);
1627 }
1628
1629 void copy(_CharT* __buffer) const {
1630 _Destroy(__buffer, __buffer + size());
1631 _S_flatten(this->_M_tree_ptr, __buffer);
1632 }
1633
1634 // This is the copy function from the standard, but
1635 // with the arguments reordered to make it consistent with the
1636 // rest of the interface.
1637 // Note that this is guaranteed not to compile if the draft standard
1638 // order is assumed.
1639 size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
1640 {
1641 size_t __size = size();
1642 size_t __len = (__pos + __n > __size? __size - __pos : __n);
1643
1644 _Destroy(__buffer, __buffer + __len);
1645 _S_flatten(this->_M_tree_ptr, __pos, __len, __buffer);
1646 return __len;
1647 }
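// Illustrative sketch: copying a slice into a plain buffer with the
// reordered copy() above (start position first, then count).  Note that
// the result is not 0-terminated.
//
//   __gnu_cxx::rope<char> __r("abcdefgh");
//   char __buf[4];
//   size_t __got = __r.copy(2, 4, __buf);  // __buf holds 'c','d','e','f'; __got == 4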
1648
1649 // Print to stdout, exposing structure. May be useful for
1650 // performance debugging.
1651 void dump() {
1652 _S_dump(this->_M_tree_ptr);
1653 }
1654
1655 // Convert to a 0-terminated string in newly allocated memory.
1656 // Embedded 0s in the input do not terminate the copy.
1657 const _CharT* c_str() const;
1658
1659 // As above, but also use the flattened representation as
1660 // the new rope representation.
1661 const _CharT* replace_with_c_str();
1662
1663 // Reclaim the memory for the flattened string generated by c_str.
1664 // Intentionally undocumented, since it's hard to say when this
1665 // is safe for multiple threads.
1666 void delete_c_str () {
1667 if (0 == this->_M_tree_ptr) return;
1668 if (_Rope_constants::_S_leaf == this->_M_tree_ptr->_M_tag &&
1669 ((_RopeLeaf*)this->_M_tree_ptr)->_M_data ==
1670 this->_M_tree_ptr->_M_c_string) {
1671 // Representation shared
1672 return;
1673 }
1674 # ifndef __GC
1675 this->_M_tree_ptr->_M_free_c_string();
1676 # endif
1677 this->_M_tree_ptr->_M_c_string = 0;
1678 }
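// Illustrative use of the flattening functions above:
//   crope r("hello, ");
//   r += "world";
//   const char* s = r.c_str();              // flatten into new memory
//   const char* t = r.replace_with_c_str(); // flatten, and reuse the flat
//                                           // copy as the representation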
1679
1680 _CharT operator[] (size_type __pos) const {
1681 return _S_fetch(this->_M_tree_ptr, __pos);
1682 }
1683
1684 _CharT at(size_type __pos) const {
1685 // if (__pos >= size()) throw out_of_range; // XXX
1686 return (*this)[__pos];
1687 }
1688
1689 const_iterator begin() const {
1690 return(const_iterator(this->_M_tree_ptr, 0));
1691 }
1692
1693 // An easy way to get a const iterator from a non-const container.
1694 const_iterator const_begin() const {
1695 return(const_iterator(this->_M_tree_ptr, 0));
1696 }
1697
1698 const_iterator end() const {
1699 return(const_iterator(this->_M_tree_ptr, size()));
1700 }
1701
1702 const_iterator const_end() const {
1703 return(const_iterator(this->_M_tree_ptr, size()));
1704 }
1705
1706 size_type size() const {
1707 return(0 == this->_M_tree_ptr? 0 : this->_M_tree_ptr->_M_size);
1708 }
1709
1710 size_type length() const {
1711 return size();
1712 }
1713
1714 size_type max_size() const {
1715 return _S_min_len[_Rope_constants::_S_max_rope_depth - 1] - 1;
1716 // Guarantees that the result can be sufficiently
1717 // balanced. Longer ropes will probably still work,
1718 // but it's harder to make guarantees.
1719 }
1720
1721 typedef reverse_iterator<const_iterator> const_reverse_iterator;
1722
1723 const_reverse_iterator rbegin() const {
1724 return const_reverse_iterator(end());
1725 }
1726
1727 const_reverse_iterator const_rbegin() const {
1728 return const_reverse_iterator(end());
1729 }
1730
1731 const_reverse_iterator rend() const {
1732 return const_reverse_iterator(begin());
1733 }
1734
1735 const_reverse_iterator const_rend() const {
1736 return const_reverse_iterator(begin());
1737 }
1738
1739 template<class _CharT2, class _Alloc2>
1740 friend rope<_CharT2,_Alloc2>
1741 operator+ (const rope<_CharT2,_Alloc2>& __left,
1742 const rope<_CharT2,_Alloc2>& __right);
1743
1744 template<class _CharT2, class _Alloc2>
1745 friend rope<_CharT2,_Alloc2>
1746 operator+ (const rope<_CharT2,_Alloc2>& __left,
1747 const _CharT2* __right);
1748
1749 template<class _CharT2, class _Alloc2>
1750 friend rope<_CharT2,_Alloc2>
1751 operator+ (const rope<_CharT2,_Alloc2>& __left, _CharT2 __right);
1752 // The symmetric cases are intentionally omitted, since they're presumed
1753 // to be less common, and we don't handle them as well.
1754
1755 // The following should really be templatized.
1756 // The first argument should be an input iterator or
1757 // forward iterator with value_type _CharT.
1758 rope& append(const _CharT* __iter, size_t __n) {
1759 _RopeRep* __result =
1760 _S_destr_concat_char_iter(this->_M_tree_ptr, __iter, __n);
1761 _S_unref(this->_M_tree_ptr);
1762 this->_M_tree_ptr = __result;
1763 return *this;
1764 }
1765
1766 rope& append(const _CharT* __c_string) {
1767 size_t __len = _S_char_ptr_len(__c_string);
1768 append(__c_string, __len);
1769 return(*this);
1770 }
1771
1772 rope& append(const _CharT* __s, const _CharT* __e) {
1773 _RopeRep* __result =
1774 _S_destr_concat_char_iter(this->_M_tree_ptr, __s, __e - __s);
1775 _S_unref(this->_M_tree_ptr);
1776 this->_M_tree_ptr = __result;
1777 return *this;
1778 }
1779
1780 rope& append(const_iterator __s, const_iterator __e) {
1781 _Self_destruct_ptr __appendee(_S_substring(
1782 __s._M_root, __s._M_current_pos, __e._M_current_pos));
1783 _RopeRep* __result =
1784 _S_concat(this->_M_tree_ptr, (_RopeRep*)__appendee);
1785 _S_unref(this->_M_tree_ptr);
1786 this->_M_tree_ptr = __result;
1787 return *this;
1788 }
1789
1790 rope& append(_CharT __c) {
1791 _RopeRep* __result =
1792 _S_destr_concat_char_iter(this->_M_tree_ptr, &__c, 1);
1793 _S_unref(this->_M_tree_ptr);
1794 this->_M_tree_ptr = __result;
1795 return *this;
1796 }
1797
1798 rope& append() { return append(_CharT()); } // XXX why?
1799
1800 rope& append(const rope& __y) {
1801 _RopeRep* __result = _S_concat(this->_M_tree_ptr, __y._M_tree_ptr);
1802 _S_unref(this->_M_tree_ptr);
1803 this->_M_tree_ptr = __result;
1804 return *this;
1805 }
1806
1807 rope& append(size_t __n, _CharT __c) {
1808 rope<_CharT,_Alloc> __last(__n, __c);
1809 return append(__last);
1810 }
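// Illustrative use of the append overloads:
//   crope r;
//   r.append("abc");       // NUL-terminated string; r is now "abc"
//   r.append('d');         // single character;      r is now "abcd"
//   r.append(3, 'x');      // three copies of 'x';   r is now "abcdxxx"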
1811
1812 void swap(rope& __b) {
1813 _RopeRep* __tmp = this->_M_tree_ptr;
1814 this->_M_tree_ptr = __b._M_tree_ptr;
1815 __b._M_tree_ptr = __tmp;
1816 }
1817
1818
1819 protected:
1820 // Result is included in refcount. Replaces the characters in [__pos1, __pos2) of __old with __r; a null __r just erases them.
1821 static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1822 size_t __pos2, _RopeRep* __r) {
1823 if (0 == __old) { _S_ref(__r); return __r; }
1824 _Self_destruct_ptr __left(
1825 _S_substring(__old, 0, __pos1));
1826 _Self_destruct_ptr __right(
1827 _S_substring(__old, __pos2, __old->_M_size));
1828 _RopeRep* __result;
1829
1830 if (0 == __r) {
1831 __result = _S_concat(__left, __right);
1832 } else {
1833 _Self_destruct_ptr __left_result(_S_concat(__left, __r));
1834 __result = _S_concat(__left_result, __right);
1835 }
1836 return __result;
1837 }
1838
1839 public:
1840 void insert(size_t __p, const rope& __r) {
1841 _RopeRep* __result =
1842 replace(this->_M_tree_ptr, __p, __p, __r._M_tree_ptr);
1843 _S_unref(this->_M_tree_ptr);
1844 this->_M_tree_ptr = __result;
1845 }
1846
1847 void insert(size_t __p, size_t __n, _CharT __c) {
1848 rope<_CharT,_Alloc> __r(__n,__c);
1849 insert(__p, __r);
1850 }
1851
1852 void insert(size_t __p, const _CharT* __i, size_t __n) {
1853 _Self_destruct_ptr __left(_S_substring(this->_M_tree_ptr, 0, __p));
1854 _Self_destruct_ptr __right(_S_substring(this->_M_tree_ptr,
1855 __p, size()));
1856 _Self_destruct_ptr __left_result(
1857 _S_concat_char_iter(__left, __i, __n));
1858 // _S_destr_concat_char_iter should be safe here.
1859 // But as it stands it's probably not a win, since __left
1860 // is likely to have additional references.
1861 _RopeRep* __result = _S_concat(__left_result, __right);
1862 _S_unref(this->_M_tree_ptr);
1863 this->_M_tree_ptr = __result;
1864 }
1865
1866 void insert(size_t __p, const _CharT* __c_string) {
1867 insert(__p, __c_string, _S_char_ptr_len(__c_string));
1868 }
1869
1870 void insert(size_t __p, _CharT __c) {
1871 insert(__p, &__c, 1);
1872 }
1873
1874 void insert(size_t __p) {
1875 _CharT __c = _CharT();
1876 insert(__p, &__c, 1);
1877 }
1878
1879 void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1880 rope __r(__i, __j);
1881 insert(__p, __r);
1882 }
1883
1884 void insert(size_t __p, const const_iterator& __i,
1885 const const_iterator& __j) {
1886 rope __r(__i, __j);
1887 insert(__p, __r);
1888 }
1889
1890 void insert(size_t __p, const iterator& __i,
1891 const iterator& __j) {
1892 rope __r(__i, __j);
1893 insert(__p, __r);
1894 }
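// Illustrative use of the position-based inserts:
//   crope r("acd");
//   r.insert(1, 'b');      // r is now "abcd"
//   r.insert(4, "ef");     // r is now "abcdef"
//   r.insert(0, 2, '-');   // r is now "--abcdef"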
1895
1896 // (position, length) versions of replace operations:
1897
1898 void replace(size_t __p, size_t __n, const rope& __r) {
1899 _RopeRep* __result =
1900 replace(this->_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr);
1901 _S_unref(this->_M_tree_ptr);
1902 this->_M_tree_ptr = __result;
1903 }
1904
1905 void replace(size_t __p, size_t __n,
1906 const _CharT* __i, size_t __i_len) {
1907 rope __r(__i, __i_len);
1908 replace(__p, __n, __r);
1909 }
1910
1911 void replace(size_t __p, size_t __n, _CharT __c) {
1912 rope __r(__c);
1913 replace(__p, __n, __r);
1914 }
1915
1916 void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1917 rope __r(__c_string);
1918 replace(__p, __n, __r);
1919 }
1920
1921 void replace(size_t __p, size_t __n,
1922 const _CharT* __i, const _CharT* __j) {
1923 rope __r(__i, __j);
1924 replace(__p, __n, __r);
1925 }
1926
1927 void replace(size_t __p, size_t __n,
1928 const const_iterator& __i, const const_iterator& __j) {
1929 rope __r(__i, __j);
1930 replace(__p, __n, __r);
1931 }
1932
1933 void replace(size_t __p, size_t __n,
1934 const iterator& __i, const iterator& __j) {
1935 rope __r(__i, __j);
1936 replace(__p, __n, __r);
1937 }
1938
1939 // Single character variants:
1940 void replace(size_t __p, _CharT __c) {
1941 iterator __i(this, __p);
1942 *__i = __c;
1943 }
1944
1945 void replace(size_t __p, const rope& __r) {
1946 replace(__p, 1, __r);
1947 }
1948
1949 void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1950 replace(__p, 1, __i, __i_len);
1951 }
1952
1953 void replace(size_t __p, const _CharT* __c_string) {
1954 replace(__p, 1, __c_string);
1955 }
1956
1957 void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1958 replace(__p, 1, __i, __j);
1959 }
1960
1961 void replace(size_t __p, const const_iterator& __i,
1962 const const_iterator& __j) {
1963 replace(__p, 1, __i, __j);
1964 }
1965
1966 void replace(size_t __p, const iterator& __i,
1967 const iterator& __j) {
1968 replace(__p, 1, __i, __j);
1969 }
1970
1971 // Erase, (position, size) variant.
1972 void erase(size_t __p, size_t __n) {
1973 _RopeRep* __result = replace(this->_M_tree_ptr, __p, __p + __n, 0);
1974 _S_unref(this->_M_tree_ptr);
1975 this->_M_tree_ptr = __result;
1976 }
1977
1978 // Erase, single character
1979 void erase(size_t __p) {
1980 erase(__p, 1);
1981 }
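// Illustrative use:
//   crope r("abcdef");
//   r.erase(1, 2);         // removes "bc"; r is now "adef"
//   r.erase(0);            // removes the first character; r is now "def"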
1982
1983 // Insert, iterator variants.
1984 iterator insert(const iterator& __p, const rope& __r)
1985 { insert(__p.index(), __r); return __p; }
1986 iterator insert(const iterator& __p, size_t __n, _CharT __c)
1987 { insert(__p.index(), __n, __c); return __p; }
1988 iterator insert(const iterator& __p, _CharT __c)
1989 { insert(__p.index(), __c); return __p; }
1990 iterator insert(const iterator& __p )
1991 { insert(__p.index()); return __p; }
1992 iterator insert(const iterator& __p, const _CharT* c_string)
1993 { insert(__p.index(), c_string); return __p; }
1994 iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
1995 { insert(__p.index(), __i, __n); return __p; }
1996 iterator insert(const iterator& __p, const _CharT* __i,
1997 const _CharT* __j)
1998 { insert(__p.index(), __i, __j); return __p; }
1999 iterator insert(const iterator& __p,
2000 const const_iterator& __i, const const_iterator& __j)
2001 { insert(__p.index(), __i, __j); return __p; }
2002 iterator insert(const iterator& __p,
2003 const iterator& __i, const iterator& __j)
2004 { insert(__p.index(), __i, __j); return __p; }
2005
2006 // Replace, range variants.
2007 void replace(const iterator& __p, const iterator& __q,
2008 const rope& __r)
2009 { replace(__p.index(), __q.index() - __p.index(), __r); }
2010 void replace(const iterator& __p, const iterator& __q, _CharT __c)
2011 { replace(__p.index(), __q.index() - __p.index(), __c); }
2012 void replace(const iterator& __p, const iterator& __q,
2013 const _CharT* __c_string)
2014 { replace(__p.index(), __q.index() - __p.index(), __c_string); }
2015 void replace(const iterator& __p, const iterator& __q,
2016 const _CharT* __i, size_t __n)
2017 { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
2018 void replace(const iterator& __p, const iterator& __q,
2019 const _CharT* __i, const _CharT* __j)
2020 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2021 void replace(const iterator& __p, const iterator& __q,
2022 const const_iterator& __i, const const_iterator& __j)
2023 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2024 void replace(const iterator& __p, const iterator& __q,
2025 const iterator& __i, const iterator& __j)
2026 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2027
2028 // Replace, iterator variants.
2029 void replace(const iterator& __p, const rope& __r)
2030 { replace(__p.index(), __r); }
2031 void replace(const iterator& __p, _CharT __c)
2032 { replace(__p.index(), __c); }
2033 void replace(const iterator& __p, const _CharT* __c_string)
2034 { replace(__p.index(), __c_string); }
2035 void replace(const iterator& __p, const _CharT* __i, size_t __n)
2036 { replace(__p.index(), __i, __n); }
2037 void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
2038 { replace(__p.index(), __i, __j); }
2039 void replace(const iterator& __p, const_iterator __i,
2040 const_iterator __j)
2041 { replace(__p.index(), __i, __j); }
2042 void replace(const iterator& __p, iterator __i, iterator __j)
2043 { replace(__p.index(), __i, __j); }
2044
2045 // Iterator and range variants of erase
2046 iterator erase(const iterator& __p, const iterator& __q) {
2047 size_t __p_index = __p.index();
2048 erase(__p_index, __q.index() - __p_index);
2049 return iterator(this, __p_index);
2050 }
2051 iterator erase(const iterator& __p) {
2052 size_t __p_index = __p.index();
2053 erase(__p_index, 1);
2054 return iterator(this, __p_index);
2055 }
2056
2057 rope substr(size_t __start, size_t __len = 1) const {
2058 return rope<_CharT,_Alloc>(
2059 _S_substring(this->_M_tree_ptr,
2060 __start,
2061 __start + __len));
2062 }
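// Note the somewhat surprising default length of 1:
//   crope r("abcdef");
//   crope s = r.substr(1, 3);   // "bcd"
//   crope t = r.substr(2);      // "c"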
2063
2064 rope substr(iterator __start, iterator __end) const {
2065 return rope<_CharT,_Alloc>(
2066 _S_substring(this->_M_tree_ptr,
2067 __start.index(),
2068 __end.index()));
2069 }
2070
2071 rope substr(iterator __start) const {
2072 size_t __pos = __start.index();
2073 return rope<_CharT,_Alloc>(
2074 _S_substring(this->_M_tree_ptr, __pos, __pos + 1));
2075 }
2076
2077 rope substr(const_iterator __start, const_iterator __end) const {
2078 // This might eventually take advantage of the cache in the
2079 // iterator.
2080 return rope<_CharT,_Alloc>(
2081 _S_substring(this->_M_tree_ptr, __start.index(), __end.index()));
2082 }
2083
2084 rope<_CharT,_Alloc> substr(const_iterator __start) {
2085 size_t __pos = __start.index();
2086 return rope<_CharT,_Alloc>(
2087 _S_substring(this->_M_tree_ptr, __pos, __pos + 1));
2088 }
2089
2090 static const size_type npos;
2091
2092 size_type find(_CharT __c, size_type __pos = 0) const;
2093 size_type find(const _CharT* __s, size_type __pos = 0) const {
2094 size_type __result_pos;
2095 const_iterator __result =
2096 std::search(const_begin() + __pos, const_end(),
2097 __s, __s + _S_char_ptr_len(__s));
2098 __result_pos = __result.index();
2099 # ifndef __STL_OLD_ROPE_SEMANTICS
2100 if (__result_pos == size()) __result_pos = npos;
2101 # endif
2102 return __result_pos;
2103 }
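// Illustrative use (the single-character find is defined in <ext/ropeimpl.h>):
//   crope r("abcabc");
//   r.find('b');       // 1
//   r.find("ca");      // 2
//   r.find("xyz");     // npos (unless __STL_OLD_ROPE_SEMANTICS is defined)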
2104
2105 iterator mutable_begin() {
2106 return(iterator(this, 0));
2107 }
2108
2109 iterator mutable_end() {
2110 return(iterator(this, size()));
2111 }
2112
2113 typedef reverse_iterator<iterator> reverse_iterator;
2114
2115 reverse_iterator mutable_rbegin() {
2116 return reverse_iterator(mutable_end());
2117 }
2118
2119 reverse_iterator mutable_rend() {
2120 return reverse_iterator(mutable_begin());
2121 }
2122
2123 reference mutable_reference_at(size_type __pos) {
2124 return reference(this, __pos);
2125 }
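// Assignment through the returned proxy replaces a single character by
// building a new tree that shares most nodes with the old one, e.g.
// (illustration only):
//   crope r("abc");
//   r.mutable_reference_at(1) = 'B';   // r is now "aBc"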
2126
2127 # ifdef __STD_STUFF
2128 reference operator[] (size_type __pos) {
2129 return _char_ref_proxy(this, __pos);
2130 }
2131
2132 reference at(size_type __pos) {
2133 // if (__pos >= size()) throw out_of_range; // XXX
2134 return (*this)[__pos];
2135 }
2136
2137 void resize(size_type __n, _CharT __c) {}
2138 void resize(size_type __n) {}
2139 void reserve(size_type __res_arg = 0) {}
2140 size_type capacity() const {
2141 return max_size();
2142 }
2143
2144 // Stuff below this line is dangerous because it's error-prone.
2145 // I would really like to get rid of it.
2146 // Copy function with funny argument ordering.
2147 size_type copy(_CharT* __buffer, size_type __n,
2148 size_type __pos = 0) const {
2149 return copy(__pos, __n, __buffer);
2150 }
2151
2152 iterator end() { return mutable_end(); }
2153
2154 iterator begin() { return mutable_begin(); }
2155
2156 reverse_iterator rend() { return mutable_rend(); }
2157
2158 reverse_iterator rbegin() { return mutable_rbegin(); }
2159
2160 # else
2161
2162 const_iterator end() { return const_end(); }
2163
2164 const_iterator begin() { return const_begin(); }
2165
2166 const_reverse_iterator rend() { return const_rend(); }
2167
2168 const_reverse_iterator rbegin() { return const_rbegin(); }
2169
2170 # endif
2171
2172 };
2173
2174 template <class _CharT, class _Alloc>
2175 const typename rope<_CharT, _Alloc>::size_type rope<_CharT, _Alloc>::npos =
2176 (size_type)(-1);
2177
2178 template <class _CharT, class _Alloc>
2179 inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2180 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2181 return (__x._M_current_pos == __y._M_current_pos &&
2182 __x._M_root == __y._M_root);
2183 }
2184
2185 template <class _CharT, class _Alloc>
2186 inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2187 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2188 return (__x._M_current_pos < __y._M_current_pos);
2189 }
2190
2191 template <class _CharT, class _Alloc>
2192 inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2193 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2194 return !(__x == __y);
2195 }
2196
2197 template <class _CharT, class _Alloc>
2198 inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2199 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2200 return __y < __x;
2201 }
2202
2203 template <class _CharT, class _Alloc>
2204 inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2205 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2206 return !(__y < __x);
2207 }
2208
2209 template <class _CharT, class _Alloc>
2210 inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2211 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2212 return !(__x < __y);
2213 }
2214
2215 template <class _CharT, class _Alloc>
2216 inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
2217 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2218 return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2219 }
2220
2221 template <class _CharT, class _Alloc>
2222 inline _Rope_const_iterator<_CharT,_Alloc>
2223 operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2224 return _Rope_const_iterator<_CharT,_Alloc>(
2225 __x._M_root, __x._M_current_pos - __n);
2226 }
2227
2228 template <class _CharT, class _Alloc>
2229 inline _Rope_const_iterator<_CharT,_Alloc>
2230 operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2231 return _Rope_const_iterator<_CharT,_Alloc>(
2232 __x._M_root, __x._M_current_pos + __n);
2233 }
2234
2235 template <class _CharT, class _Alloc>
2236 inline _Rope_const_iterator<_CharT,_Alloc>
2237 operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
2238 return _Rope_const_iterator<_CharT,_Alloc>(
2239 __x._M_root, __x._M_current_pos + __n);
2240 }
2241
2242 template <class _CharT, class _Alloc>
2243 inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
2244 const _Rope_iterator<_CharT,_Alloc>& __y) {
2245 return (__x._M_current_pos == __y._M_current_pos &&
2246 __x._M_root_rope == __y._M_root_rope);
2247 }
2248
2249 template <class _CharT, class _Alloc>
2250 inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
2251 const _Rope_iterator<_CharT,_Alloc>& __y) {
2252 return (__x._M_current_pos < __y._M_current_pos);
2253 }
2254
2255 template <class _CharT, class _Alloc>
2256 inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
2257 const _Rope_iterator<_CharT,_Alloc>& __y) {
2258 return !(__x == __y);
2259 }
2260
2261 template <class _CharT, class _Alloc>
2262 inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
2263 const _Rope_iterator<_CharT,_Alloc>& __y) {
2264 return __y < __x;
2265 }
2266
2267 template <class _CharT, class _Alloc>
2268 inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
2269 const _Rope_iterator<_CharT,_Alloc>& __y) {
2270 return !(__y < __x);
2271 }
2272
2273 template <class _CharT, class _Alloc>
2274 inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
2275 const _Rope_iterator<_CharT,_Alloc>& __y) {
2276 return !(__x < __y);
2277 }
2278
2279 template <class _CharT, class _Alloc>
2280 inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2281 const _Rope_iterator<_CharT,_Alloc>& __y) {
2282 return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2283 }
2284
2285 template <class _CharT, class _Alloc>
2286 inline _Rope_iterator<_CharT,_Alloc>
2287 operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2288 ptrdiff_t __n) {
2289 return _Rope_iterator<_CharT,_Alloc>(
2290 __x._M_root_rope, __x._M_current_pos - __n);
2291 }
2292
2293 template <class _CharT, class _Alloc>
2294 inline _Rope_iterator<_CharT,_Alloc>
2295 operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
2296 ptrdiff_t __n) {
2297 return _Rope_iterator<_CharT,_Alloc>(
2298 __x._M_root_rope, __x._M_current_pos + __n);
2299 }
2300
2301 template <class _CharT, class _Alloc>
2302 inline _Rope_iterator<_CharT,_Alloc>
2303 operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
2304 return _Rope_iterator<_CharT,_Alloc>(
2305 __x._M_root_rope, __x._M_current_pos + __n);
2306 }
2307
2308 template <class _CharT, class _Alloc>
2309 inline
2310 rope<_CharT,_Alloc>
2311 operator+ (const rope<_CharT,_Alloc>& __left,
2312 const rope<_CharT,_Alloc>& __right)
2313 {
2314 return rope<_CharT,_Alloc>(
2315 rope<_CharT,_Alloc>::_S_concat(__left._M_tree_ptr, __right._M_tree_ptr));
2316 // Inlining this should make it possible to keep __left and
2317 // __right in registers.
2318 }
2319
2320 template <class _CharT, class _Alloc>
2321 inline
2322 rope<_CharT,_Alloc>&
2323 operator+= (rope<_CharT,_Alloc>& __left,
2324 const rope<_CharT,_Alloc>& __right)
2325 {
2326 __left.append(__right);
2327 return __left;
2328 }
2329
2330 template <class _CharT, class _Alloc>
2331 inline
2332 rope<_CharT,_Alloc>
2333 operator+ (const rope<_CharT,_Alloc>& __left,
2334 const _CharT* __right) {
2335 size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
2336 return rope<_CharT,_Alloc>(
2337 rope<_CharT,_Alloc>::_S_concat_char_iter(
2338 __left._M_tree_ptr, __right, __rlen));
2339 }
2340
2341 template <class _CharT, class _Alloc>
2342 inline
2343 rope<_CharT,_Alloc>&
2344 operator+= (rope<_CharT,_Alloc>& __left,
2345 const _CharT* __right) {
2346 __left.append(__right);
2347 return __left;
2348 }
2349
2350 template <class _CharT, class _Alloc>
2351 inline
2352 rope<_CharT,_Alloc>
2353 operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
2354 return rope<_CharT,_Alloc>(
2355 rope<_CharT,_Alloc>::_S_concat_char_iter(
2356 __left._M_tree_ptr, &__right, 1));
2357 }
2358
2359 template <class _CharT, class _Alloc>
2360 inline
2361 rope<_CharT,_Alloc>&
2362 operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
2363 __left.append(__right);
2364 return __left;
2365 }
2366
2367 template <class _CharT, class _Alloc>
2368 bool
2369 operator< (const rope<_CharT,_Alloc>& __left,
2370 const rope<_CharT,_Alloc>& __right) {
2371 return __left.compare(__right) < 0;
2372 }
2373
2374 template <class _CharT, class _Alloc>
2375 bool
2376 operator== (const rope<_CharT,_Alloc>& __left,
2377 const rope<_CharT,_Alloc>& __right) {
2378 return __left.compare(__right) == 0;
2379 }
2380
2381 template <class _CharT, class _Alloc>
2382 inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2383 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2384 return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
2385 }
2386
2387 template <class _CharT, class _Alloc>
2388 inline bool
2389 operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2390 return !(__x == __y);
2391 }
2392
2393 template <class _CharT, class _Alloc>
2394 inline bool
2395 operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2396 return __y < __x;
2397 }
2398
2399 template <class _CharT, class _Alloc>
2400 inline bool
2401 operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2402 return !(__y < __x);
2403 }
2404
2405 template <class _CharT, class _Alloc>
2406 inline bool
2407 operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2408 return !(__x < __y);
2409 }
2410
2411 template <class _CharT, class _Alloc>
2412 inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2413 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2414 return !(__x == __y);
2415 }
2416
2417 template<class _CharT, class _Traits, class _Alloc>
2418 std::basic_ostream<_CharT, _Traits>& operator<<
2419 (std::basic_ostream<_CharT, _Traits>& __o,
2420 const rope<_CharT, _Alloc>& __r);
2421
2422 typedef rope<char> crope;
2423 typedef rope<wchar_t> wrope;
2424
2425 inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
2426 {
2427 return __c.mutable_reference_at(__i);
2428 }
2429
2430 inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
2431 {
2432 return __c.mutable_reference_at(__i);
2433 }
2434
2435 template <class _CharT, class _Alloc>
2436 inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
2437 __x.swap(__y);
2438 }
2439
2440 // Hash functions should probably be revisited later:
2441 template<> struct hash<crope>
2442 {
2443 size_t operator()(const crope& __str) const
2444 {
2445 size_t __size = __str.size();
2446
2447 if (0 == __size) return 0;
2448 return 13*__str[0] + 5*__str[__size - 1] + __size;
2449 }
2450 };
2451
2452
2453 template<> struct hash<wrope>
2454 {
2455 size_t operator()(const wrope& __str) const
2456 {
2457 size_t __size = __str.size();
2458
2459 if (0 == __size) return 0;
2460 return 13*__str[0] + 5*__str[__size - 1] + __size;
2461 }
2462 };
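// Illustrative use of the hash specializations above, assuming
// <ext/hash_map> is also included:
//   __gnu_cxx::hash_map<crope, int> counts;
//   ++counts[crope("some key")];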
2463
2464 } // namespace __gnu_cxx
2465
2466 # include <ext/ropeimpl.h>
2467
2468 #endif