summaryrefslogtreecommitdiffstats
path: root/contrib/libstdc++/stl/stl_rope.h
diff options
context:
space:
mode:
authorobrien <obrien@FreeBSD.org>1999-10-16 03:52:48 +0000
committerobrien <obrien@FreeBSD.org>1999-10-16 03:52:48 +0000
commitb721bc1aede3b3211302d103a1de1019c732ce74 (patch)
tree0373fc465a78f12f63d0f0e1487af637156b8a58 /contrib/libstdc++/stl/stl_rope.h
parent9f01c491d0571ee2f91980be244eaeef54bef145 (diff)
downloadFreeBSD-src-b721bc1aede3b3211302d103a1de1019c732ce74.zip
FreeBSD-src-b721bc1aede3b3211302d103a1de1019c732ce74.tar.gz
Virgin import of GCC 2.95.1's libstdc++
Diffstat (limited to 'contrib/libstdc++/stl/stl_rope.h')
-rw-r--r--contrib/libstdc++/stl/stl_rope.h3821
1 files changed, 2125 insertions, 1696 deletions
diff --git a/contrib/libstdc++/stl/stl_rope.h b/contrib/libstdc++/stl/stl_rope.h
index d37c679..44f51ae 100644
--- a/contrib/libstdc++/stl/stl_rope.h
+++ b/contrib/libstdc++/stl/stl_rope.h
@@ -15,6 +15,12 @@
* You should not attempt to use it directly.
*/
+// rope<_CharT,_Alloc> is a sequence of _CharT.
+// Ropes appear to be mutable, but update operations
+// really copy enough of the data structure to leave the original
+// valid. Thus ropes can be logically copied by just copying
+// a pointer value.
+
#ifndef __SGI_STL_INTERNAL_ROPE_H
# define __SGI_STL_INTERNAL_ROPE_H
@@ -33,49 +39,47 @@ __STL_BEGIN_NAMESPACE
#pragma set woff 1174
#endif
+// The _S_eos function is used for those functions that
+// convert to/from C-like strings to detect the end of the string.
+
// The end-of-C-string character.
// This is what the draft standard says it should be.
-template <class charT>
-inline charT __eos(charT*) { return charT(); }
+template <class _CharT>
+inline _CharT _S_eos(_CharT*) { return _CharT(); }
// Test for basic character types.
// For basic character types leaves having a trailing eos.
-template <class charT>
-inline bool __is_basic_char_type(charT *) { return false; }
-template <class charT>
-inline bool __is_one_byte_char_type(charT *) { return false; }
-
-inline bool __is_basic_char_type(char *) { return true; }
-inline bool __is_one_byte_char_type(char *) { return true; }
-inline bool __is_basic_char_type(wchar_t *) { return true; }
-
-// Store an eos iff charT is a basic character type.
-// Do not reference __eos if it isn't.
-template <class charT>
-inline void __cond_store_eos(charT&) {}
-
-inline void __cond_store_eos(char& c) { c = 0; }
-inline void __cond_store_eos(wchar_t& c) { c = 0; }
-
-
-// rope<charT,Alloc> is a sequence of charT.
-// Ropes appear to be mutable, but update operations
-// really copy enough of the data structure to leave the original
-// valid. Thus ropes can be logically copied by just copying
-// a pointer value.
-// The __eos function is used for those functions that
-// convert to/from C-like strings to detect the end of the string.
-// __compare is used as the character comparison function.
-template <class charT>
+template <class _CharT>
+inline bool _S_is_basic_char_type(_CharT*) { return false; }
+template <class _CharT>
+inline bool _S_is_one_byte_char_type(_CharT*) { return false; }
+
+inline bool _S_is_basic_char_type(char*) { return true; }
+inline bool _S_is_one_byte_char_type(char*) { return true; }
+inline bool _S_is_basic_char_type(wchar_t*) { return true; }
+
+// Store an eos iff _CharT is a basic character type.
+// Do not reference _S_eos if it isn't.
+template <class _CharT>
+inline void _S_cond_store_eos(_CharT&) {}
+
+inline void _S_cond_store_eos(char& __c) { __c = 0; }
+inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
+
+// char_producers are logically functions that generate a section of
+// a string.  These can be converted to ropes.  The resulting rope
+// invokes the char_producer on demand. This allows, for example,
+// files to be viewed as ropes without reading the entire file.
+template <class _CharT>
class char_producer {
public:
- virtual ~char_producer() {};
- virtual void operator()(size_t start_pos, size_t len, charT* buffer)
- = 0;
- // Buffer should really be an arbitrary output iterator.
- // That way we could flatten directly into an ostream, etc.
- // This is thoroughly impossible, since iterator types don't
- // have runtime descriptions.
+ virtual ~char_producer() {};
+ virtual void operator()(size_t __start_pos, size_t __len,
+ _CharT* __buffer) = 0;
+ // Buffer should really be an arbitrary output iterator.
+ // That way we could flatten directly into an ostream, etc.
+ // This is thoroughly impossible, since iterator types don't
+ // have runtime descriptions.
};
// Sequence buffers:
@@ -92,111 +96,112 @@ class char_producer {
// behave a little like basic_ostringstream<sequence::value_type> and a
// little like containers.
-template<class sequence, size_t buf_sz = 100
+template<class _Sequence, size_t _Buf_sz = 100
# if defined(__sgi) && !defined(__GNUC__)
-# define __TYPEDEF_WORKAROUND
- ,class v = typename sequence::value_type
+# define __TYPEDEF_WORKAROUND
+ ,class _V = typename _Sequence::value_type
# endif
>
// The 3rd parameter works around a common compiler bug.
class sequence_buffer : public output_iterator {
public:
# ifndef __TYPEDEF_WORKAROUND
- typedef typename sequence::value_type value_type;
-# else
- typedef v value_type;
-# endif
+ typedef typename _Sequence::value_type value_type;
+# else
+ typedef _V value_type;
+# endif
protected:
- sequence *prefix;
- value_type buffer[buf_sz];
- size_t buf_count;
+ _Sequence* _M_prefix;
+ value_type _M_buffer[_Buf_sz];
+ size_t _M_buf_count;
public:
- void flush() {
- prefix->append(buffer, buffer + buf_count);
- buf_count = 0;
- }
- ~sequence_buffer() { flush(); }
- sequence_buffer() : prefix(0), buf_count(0) {}
- sequence_buffer(const sequence_buffer & x) {
- prefix = x.prefix;
- buf_count = x.buf_count;
- copy(x.buffer, x.buffer + x.buf_count, buffer);
- }
- sequence_buffer(sequence_buffer & x) {
- x.flush();
- prefix = x.prefix;
- buf_count = 0;
- }
- sequence_buffer(sequence& s) : prefix(&s), buf_count(0) {}
- sequence_buffer& operator= (sequence_buffer& x) {
- x.flush();
- prefix = x.prefix;
- buf_count = 0;
- return *this;
- }
- sequence_buffer& operator= (const sequence_buffer& x) {
- prefix = x.prefix;
- buf_count = x.buf_count;
- copy(x.buffer, x.buffer + x.buf_count, buffer);
- return *this;
- }
- void push_back(value_type x)
- {
- if (buf_count < buf_sz) {
- buffer[buf_count] = x;
- ++buf_count;
- } else {
- flush();
- buffer[0] = x;
- buf_count = 1;
- }
- }
- void append(value_type *s, size_t len)
- {
- if (len + buf_count <= buf_sz) {
- size_t i, j;
- for (i = buf_count, j = 0; j < len; i++, j++) {
- buffer[i] = s[j];
- }
- buf_count += len;
- } else if (0 == buf_count) {
- prefix->append(s, s + len);
- } else {
- flush();
- append(s, len);
- }
- }
- sequence_buffer& write(value_type *s, size_t len)
- {
- append(s, len);
- return *this;
- }
- sequence_buffer& put(value_type x)
- {
- push_back(x);
- return *this;
- }
- sequence_buffer& operator=(const value_type& rhs)
- {
- push_back(rhs);
- return *this;
- }
- sequence_buffer& operator*() { return *this; }
- sequence_buffer& operator++() { return *this; }
- sequence_buffer& operator++(int) { return *this; }
+ void flush() {
+ _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
+ _M_buf_count = 0;
+ }
+ ~sequence_buffer() { flush(); }
+ sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
+ sequence_buffer(const sequence_buffer& __x) {
+ _M_prefix = __x._M_prefix;
+ _M_buf_count = __x._M_buf_count;
+ copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
+ }
+ sequence_buffer(sequence_buffer& __x) {
+ __x.flush();
+ _M_prefix = __x._M_prefix;
+ _M_buf_count = 0;
+ }
+ sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
+ sequence_buffer& operator= (sequence_buffer& __x) {
+ __x.flush();
+ _M_prefix = __x._M_prefix;
+ _M_buf_count = 0;
+ return *this;
+ }
+ sequence_buffer& operator= (const sequence_buffer& __x) {
+ _M_prefix = __x._M_prefix;
+ _M_buf_count = __x._M_buf_count;
+ copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
+ return *this;
+ }
+ void push_back(value_type __x)
+ {
+ if (_M_buf_count < _Buf_sz) {
+ _M_buffer[_M_buf_count] = __x;
+ ++_M_buf_count;
+ } else {
+ flush();
+ _M_buffer[0] = __x;
+ _M_buf_count = 1;
+ }
+ }
+ void append(value_type* __s, size_t __len)
+ {
+ if (__len + _M_buf_count <= _Buf_sz) {
+ size_t __i = _M_buf_count;
+ size_t __j = 0;
+ for (; __j < __len; __i++, __j++) {
+ _M_buffer[__i] = __s[__j];
+ }
+ _M_buf_count += __len;
+ } else if (0 == _M_buf_count) {
+ _M_prefix->append(__s, __s + __len);
+ } else {
+ flush();
+ append(__s, __len);
+ }
+ }
+ sequence_buffer& write(value_type* __s, size_t __len)
+ {
+ append(__s, __len);
+ return *this;
+ }
+ sequence_buffer& put(value_type __x)
+ {
+ push_back(__x);
+ return *this;
+ }
+ sequence_buffer& operator=(const value_type& __rhs)
+ {
+ push_back(__rhs);
+ return *this;
+ }
+ sequence_buffer& operator*() { return *this; }
+ sequence_buffer& operator++() { return *this; }
+ sequence_buffer& operator++(int) { return *this; }
};
// The following should be treated as private, at least for now.
-template<class charT>
-class __rope_char_consumer {
+template<class _CharT>
+class _Rope_char_consumer {
public:
- // If we had member templates, these should not be virtual.
- // For now we need to use run-time parametrization where
- // compile-time would do. Hence this should all be private
- // for now.
- // The symmetry with char_producer is accidental and temporary.
- virtual ~__rope_char_consumer() {};
- virtual bool operator()(const charT* buffer, size_t len) = 0;
+ // If we had member templates, these should not be virtual.
+ // For now we need to use run-time parametrization where
+    // compile-time would do.  Hence this should all be private
+ // for now.
+ // The symmetry with char_producer is accidental and temporary.
+ virtual ~_Rope_char_consumer() {};
+ virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
};
//
@@ -205,22 +210,22 @@ class __rope_char_consumer {
// equality on rope iterators. According to the draft standard, the
// template parameters for such an equality operator cannot be inferred
// from the occurence of a member class as a parameter.
-// (SGI compilers in fact allow this, but the result wouldn't be
+// (SGI compilers in fact allow this, but the result wouldn't be
// portable.)
// Similarly, some of the static member functions are member functions
// only to avoid polluting the global namespace, and to circumvent
// restrictions on type inference for template functions.
//
-template<class CharT, class Alloc=__ALLOC> class rope;
-template<class CharT, class Alloc> struct __rope_RopeConcatenation;
-template<class CharT, class Alloc> struct __rope_RopeLeaf;
-template<class CharT, class Alloc> struct __rope_RopeFunction;
-template<class CharT, class Alloc> struct __rope_RopeSubstring;
-template<class CharT, class Alloc> class __rope_iterator;
-template<class CharT, class Alloc> class __rope_const_iterator;
-template<class CharT, class Alloc> class __rope_charT_ref_proxy;
-template<class CharT, class Alloc> class __rope_charT_ptr_proxy;
+template<class _CharT, class _Alloc=__STL_DEFAULT_ALLOCATOR(_CharT)> class rope;
+template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
+template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
+template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
+template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
+template<class _CharT, class _Alloc> class _Rope_iterator;
+template<class _CharT, class _Alloc> class _Rope_const_iterator;
+template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
+template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
//
// The internal data structure for representing a rope. This is
@@ -228,273 +233,488 @@ template<class CharT, class Alloc> class __rope_charT_ptr_proxy;
// to one of these.
//
// A few basic functions for manipulating this data structure
-// are members of RopeBase. Most of the more complex algorithms
+// are members of _RopeRep. Most of the more complex algorithms
// are implemented as rope members.
//
-// Some of the static member functions of RopeBase have identically
-// named functions in rope that simply invoke the RopeBase versions.
+// Some of the static member functions of _RopeRep have identically
+// named functions in rope that simply invoke the _RopeRep versions.
//
+// A macro to introduce various allocation and deallocation functions
+// These need to be defined differently depending on whether or not
+// we are using standard conforming allocators, and whether the allocator
+// instances have real state. Thus this macro is invoked repeatedly
+// with different definitions of __ROPE_DEFINE_ALLOC.
+// __ROPE_DEFINE_ALLOC(type,name) defines
+// type * name_allocate(size_t) and
+//      void name_deallocate(type *, size_t)
+// Both functions may or may not be static.
+
+#define __ROPE_DEFINE_ALLOCS(__a) \
+ __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
+ typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
+ __ROPE_DEFINE_ALLOC(__C,_C) \
+ typedef _Rope_RopeLeaf<_CharT,__a> __L; \
+ __ROPE_DEFINE_ALLOC(__L,_L) \
+ typedef _Rope_RopeFunction<_CharT,__a> __F; \
+ __ROPE_DEFINE_ALLOC(__F,_F) \
+ typedef _Rope_RopeSubstring<_CharT,__a> __S; \
+ __ROPE_DEFINE_ALLOC(__S,_S)
+
+// Internal rope nodes potentially store a copy of the allocator
+// instance used to allocate them. This is mostly redundant.
+// But the alternative would be to pass allocator instances around
+// in some form to nearly all internal functions, since any pointer
+// assignment may result in a zero reference count and thus require
+// deallocation.
+// The _Rope_rep_base class encapsulates
+// the differences between SGI-style allocators and standard-conforming
+// allocators.
+
+#ifdef __STL_USE_STD_ALLOCATORS
+
+#define __STATIC_IF_SGI_ALLOC /* not static */
+
+// Base class for ordinary allocators.
+template <class _CharT, class _Allocator, bool _IsStatic>
+class _Rope_rep_alloc_base {
+public:
+ typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
+ allocator_type;
+ allocator_type get_allocator() const { return _M_data_allocator; }
+ _Rope_rep_alloc_base(size_t __size, const allocator_type& __a)
+ : _M_size(__size), _M_data_allocator(__a) {}
+ size_t _M_size; // This is here only to avoid wasting space
+ // for an otherwise empty base class.
+
+
+protected:
+ allocator_type _M_data_allocator;
+
+# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
+ typedef typename \
+ _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
+ /*static*/ _Tp * __name##_allocate(size_t __n) \
+ { return __name##Allocator(_M_data_allocator).allocate(__n); } \
+ void __name##_deallocate(_Tp* __p, size_t __n) \
+ { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
+ __ROPE_DEFINE_ALLOCS(_Allocator);
+# undef __ROPE_DEFINE_ALLOC
+};
+
+// Specialization for allocators that have the property that we don't
+// actually have to store an allocator object.
+template <class _CharT, class _Allocator>
+class _Rope_rep_alloc_base<_CharT,_Allocator,true> {
+public:
+ typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
+ allocator_type;
+ allocator_type get_allocator() const { return allocator_type(); }
+ _Rope_rep_alloc_base(size_t __size, const allocator_type&)
+ : _M_size(__size) {}
+ size_t _M_size;
+
+protected:
+
+# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
+ typedef typename \
+ _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
+ typedef typename \
+ _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
+ static _Tp* __name##_allocate(size_t __n) \
+ { return __name##Alloc::allocate(__n); } \
+ void __name##_deallocate(_Tp *__p, size_t __n) \
+ { __name##Alloc::deallocate(__p, __n); }
+ __ROPE_DEFINE_ALLOCS(_Allocator);
+# undef __ROPE_DEFINE_ALLOC
+};
+
+template <class _CharT, class _Alloc>
+struct _Rope_rep_base
+ : public _Rope_rep_alloc_base<_CharT,_Alloc,
+ _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
+{
+ typedef _Rope_rep_alloc_base<_CharT,_Alloc,
+ _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
+ _Base;
+ typedef typename _Base::allocator_type allocator_type;
+ _Rope_rep_base(size_t __size, const allocator_type& __a)
+ : _Base(__size, __a) {}
+};
+
+#else /* !__STL_USE_STD_ALLOCATORS */
+
+#define __STATIC_IF_SGI_ALLOC static
+
+template <class _CharT, class _Alloc>
+class _Rope_rep_base {
+public:
+ typedef _Alloc allocator_type;
+ static allocator_type get_allocator() { return allocator_type(); }
+ _Rope_rep_base(size_t __size, const allocator_type&) : _M_size(__size) {}
+ size_t _M_size;
+
+protected:
+
+# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
+ typedef simple_alloc<_Tp, _Alloc> __name##Alloc; \
+ static _Tp* __name##_allocate(size_t __n) \
+ { return __name##Alloc::allocate(__n); } \
+ static void __name##_deallocate(_Tp* __p, size_t __n) \
+ { __name##Alloc::deallocate(__p, __n); }
+ __ROPE_DEFINE_ALLOCS(_Alloc);
+# undef __ROPE_DEFINE_ALLOC
+};
+
+#endif /* __STL_USE_STD_ALLOCATORS */
-template<class charT, class Alloc>
-struct __rope_RopeBase {
- typedef rope<charT,Alloc> my_rope;
- typedef simple_alloc<charT, Alloc> DataAlloc;
- typedef simple_alloc<__rope_RopeConcatenation<charT,Alloc>, Alloc> CAlloc;
- typedef simple_alloc<__rope_RopeLeaf<charT,Alloc>, Alloc> LAlloc;
- typedef simple_alloc<__rope_RopeFunction<charT,Alloc>, Alloc> FAlloc;
- typedef simple_alloc<__rope_RopeSubstring<charT,Alloc>, Alloc> SAlloc;
+
+template<class _CharT, class _Alloc>
+struct _Rope_RopeRep : public _Rope_rep_base<_CharT,_Alloc> {
public:
- enum { max_rope_depth = 45 };
- enum {leaf, concat, substringfn, function} tag:8;
- bool is_balanced:8;
- unsigned char depth;
- size_t size;
- __GC_CONST charT * c_string;
- /* Flattened version of string, if needed. */
- /* typically 0. */
- /* If it's not 0, then the memory is owned */
- /* by this node. */
- /* In the case of a leaf, this may point to */
- /* the same memory as the data field. */
+ enum { _S_max_rope_depth = 45 };
+ enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
+ _Tag _M_tag:8;
+ bool _M_is_balanced:8;
+ unsigned char _M_depth;
+ __GC_CONST _CharT* _M_c_string;
+ /* Flattened version of string, if needed. */
+ /* typically 0. */
+ /* If it's not 0, then the memory is owned */
+ /* by this node. */
+ /* In the case of a leaf, this may point to */
+ /* the same memory as the data field. */
+ typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
+ _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t __size,
+ allocator_type __a)
+ : _M_tag(__t), _M_depth(__d), _M_is_balanced(__b), _M_c_string(0),
+ _Rope_rep_base<_CharT,_Alloc>(__size, __a)
+ {
+# ifndef __GC
+ _M_refcount = 1;
+ _M_init_refcount_lock();
+# endif
+ }
# ifndef __GC
# if defined(__STL_WIN32THREADS)
- long refcount; // InterlockedIncrement wants a long *
-# else
- size_t refcount;
-# endif
- // We count references from rope instances
- // and references from other rope nodes. We
- // do not count const_iterator references.
- // Iterator references are counted so that rope modifications
- // can be detected after the fact.
- // Generally function results are counted, i.e.
- // a pointer returned by a function is included at the
- // point at which the pointer is returned.
- // The recipient should decrement the count if the
- // result is not needed.
- // Generally function arguments are not reflected
- // in the reference count. The callee should increment
- // the count before saving the argument someplace that
- // will outlive the call.
+ long _M_refcount; // InterlockedIncrement wants a long *
+# else
+ size_t _M_refcount;
+# endif
+ // We count references from rope instances
+ // and references from other rope nodes. We
+ // do not count const_iterator references.
+ // Iterator references are counted so that rope modifications
+ // can be detected after the fact.
+        // Generally function results are counted, i.e.
+ // a pointer returned by a function is included at the
+ // point at which the pointer is returned.
+ // The recipient should decrement the count if the
+        // result is not needed.
+ // Generally function arguments are not reflected
+ // in the reference count. The callee should increment
+ // the count before saving the argument someplace that
+ // will outlive the call.
# endif
# ifndef __GC
# ifdef __STL_SGI_THREADS
- // Reference counting with multiple threads and no
- // hardware or thread package support is pretty awful.
- // Mutexes are normally too expensive.
- // We'll assume a COMPARE_AND_SWAP(destp, old, new)
- // operation, which might be cheaper.
+ // Reference counting with multiple threads and no
+ // hardware or thread package support is pretty awful.
+ // Mutexes are normally too expensive.
+        // We'll assume a COMPARE_AND_SWAP(destp, old, new)
+ // operation, which might be cheaper.
# if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
-# define __add_and_fetch(l,v) add_then_test((unsigned long *)l,v)
+# define __add_and_fetch(l,v) add_then_test((unsigned long*)l,v)
# endif
- void init_refcount_lock() {}
- void incr_refcount ()
- {
- __add_and_fetch(&refcount, 1);
- }
- size_t decr_refcount ()
- {
- return __add_and_fetch(&refcount, (size_t)(-1));
- }
+ void _M_init_refcount_lock() {}
+ void _M_incr_refcount ()
+ {
+ __add_and_fetch(&_M_refcount, 1);
+ }
+ size_t _M_decr_refcount ()
+ {
+ return __add_and_fetch(&_M_refcount, (size_t)(-1));
+ }
# elif defined(__STL_WIN32THREADS)
- void init_refcount_lock() {}
- void incr_refcount ()
+ void _M_init_refcount_lock() {}
+ void _M_incr_refcount ()
{
- InterlockedIncrement(&refcount);
+ InterlockedIncrement(&_M_refcount);
}
- size_t decr_refcount ()
+ size_t _M_decr_refcount ()
{
- return InterlockedDecrement(&refcount);
+ return InterlockedDecrement(&_M_refcount);
}
# elif defined(__STL_PTHREADS)
- // This should be portable, but performance is expected
- // to be quite awful. This really needs platform specific
- // code.
- pthread_mutex_t refcount_lock;
- void init_refcount_lock() {
- pthread_mutex_init(&refcount_lock, 0);
- }
- void incr_refcount ()
+ // This should be portable, but performance is expected
+ // to be quite awful. This really needs platform specific
+ // code.
+ pthread_mutex_t _M_refcount_lock;
+ void _M_init_refcount_lock() {
+ pthread_mutex_init(&_M_refcount_lock, 0);
+ }
+ void _M_incr_refcount ()
{
- pthread_mutex_lock(&refcount_lock);
- ++refcount;
- pthread_mutex_unlock(&refcount_lock);
+ pthread_mutex_lock(&_M_refcount_lock);
+ ++_M_refcount;
+ pthread_mutex_unlock(&_M_refcount_lock);
}
- size_t decr_refcount ()
+ size_t _M_decr_refcount ()
{
- size_t result;
- pthread_mutex_lock(&refcount_lock);
- result = --refcount;
- pthread_mutex_unlock(&refcount_lock);
- return result;
+ size_t __result;
+ pthread_mutex_lock(&_M_refcount_lock);
+ __result = --_M_refcount;
+ pthread_mutex_unlock(&_M_refcount_lock);
+ return __result;
+ }
+# else
+ void _M_init_refcount_lock() {}
+ void _M_incr_refcount ()
+ {
+ ++_M_refcount;
+ }
+ size_t _M_decr_refcount ()
+ {
+ --_M_refcount;
+ return _M_refcount;
}
-# else
- void init_refcount_lock() {}
- void incr_refcount ()
- {
- ++refcount;
- }
- size_t decr_refcount ()
- {
- --refcount;
- return refcount;
- }
# endif
# else
- void incr_refcount () {}
+ void _M_incr_refcount () {}
+# endif
+# ifdef __STL_USE_STD_ALLOCATORS
+ static void _S_free_string(__GC_CONST _CharT*, size_t __len,
+ allocator_type __a);
+# define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
+# else
+ static void _S_free_string(__GC_CONST _CharT*, size_t __len);
+# define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l);
# endif
- static void free_string(charT *, size_t len);
- // Deallocate data section of a leaf.
- // This shouldn't be a member function.
- // But its hard to do anything else at the
- // moment, because it's templatized w.r.t.
- // an allocator.
- // Does nothing if __GC is defined.
+ // Deallocate data section of a leaf.
+ // This shouldn't be a member function.
+ // But its hard to do anything else at the
+ // moment, because it's templatized w.r.t.
+ // an allocator.
+ // Does nothing if __GC is defined.
# ifndef __GC
- void free_c_string();
- void free_tree();
- // Deallocate t. Assumes t is not 0.
- void unref_nonnil()
- {
- if (0 == decr_refcount()) free_tree();
- }
- void ref_nonnil()
- {
- incr_refcount();
- }
- static void unref(__rope_RopeBase* t)
- {
- if (0 != t) {
- t -> unref_nonnil();
- }
- }
- static void ref(__rope_RopeBase* t)
- {
- if (0 != t) t -> incr_refcount();
- }
- static void free_if_unref(__rope_RopeBase* t)
- {
- if (0 != t && 0 == t -> refcount) t -> free_tree();
- }
+ void _M_free_c_string();
+ void _M_free_tree();
+ // Deallocate t. Assumes t is not 0.
+ void _M_unref_nonnil()
+ {
+ if (0 == _M_decr_refcount()) _M_free_tree();
+ }
+ void _M_ref_nonnil()
+ {
+ _M_incr_refcount();
+ }
+ static void _S_unref(_Rope_RopeRep* __t)
+ {
+ if (0 != __t) {
+ __t->_M_unref_nonnil();
+ }
+ }
+ static void _S_ref(_Rope_RopeRep* __t)
+ {
+ if (0 != __t) __t->_M_incr_refcount();
+ }
+ static void _S_free_if_unref(_Rope_RopeRep* __t)
+ {
+ if (0 != __t && 0 == __t->_M_refcount) __t->_M_free_tree();
+ }
# else /* __GC */
- void unref_nonnil() {}
- void ref_nonnil() {}
- static void unref(__rope_RopeBase* t) {}
- static void ref(__rope_RopeBase* t) {}
- static void fn_finalization_proc(void * tree, void *);
- static void free_if_unref(__rope_RopeBase* t) {}
+ void _M_unref_nonnil() {}
+ void _M_ref_nonnil() {}
+ static void _S_unref(_Rope_RopeRep*) {}
+ static void _S_ref(_Rope_RopeRep*) {}
+ static void _S_free_if_unref(_Rope_RopeRep*) {}
# endif
+};
+
+template<class _CharT, class _Alloc>
+struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
+ public:
+ // Apparently needed by VC++
// The data fields of leaves are allocated with some
// extra space, to accomodate future growth and for basic
// character types, to hold a trailing eos character.
- enum { alloc_granularity = 8 };
- static size_t rounded_up_size(size_t n) {
- size_t size_with_eos;
-
- if (__is_basic_char_type((charT *)0)) {
- size_with_eos = n + 1;
- } else {
- size_with_eos = n;
- }
+ enum { _S_alloc_granularity = 8 };
+ static size_t _S_rounded_up_size(size_t __n) {
+ size_t __size_with_eos;
+
+ if (_S_is_basic_char_type((_CharT*)0)) {
+ __size_with_eos = __n + 1;
+ } else {
+ __size_with_eos = __n;
+ }
# ifdef __GC
- return size_with_eos;
-# else
- // Allow slop for in-place expansion.
- return (size_with_eos + alloc_granularity-1)
- &~ (alloc_granularity-1);
-# endif
+ return __size_with_eos;
+# else
+ // Allow slop for in-place expansion.
+ return (__size_with_eos + _S_alloc_granularity-1)
+ &~ (_S_alloc_granularity-1);
+# endif
}
+ __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
+ /* The allocated size is */
+ /* _S_rounded_up_size(size), except */
+ /* in the GC case, in which it */
+ /* doesn't matter. */
+ typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
+ _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
+ : _M_data(__d)
+ , _Rope_RopeRep<_CharT,_Alloc>(_S_leaf, 0, true, __size, __a)
+ {
+ __stl_assert(__size > 0);
+ if (_S_is_basic_char_type((_CharT *)0)) {
+ // already eos terminated.
+ _M_c_string = __d;
+ }
+ }
+ // The constructor assumes that d has been allocated with
+ // the proper allocator and the properly padded size.
+ // In contrast, the destructor deallocates the data:
+# ifndef __GC
+ ~_Rope_RopeLeaf() {
+ if (_M_data != _M_c_string) {
+ _M_free_c_string();
+ }
+ __STL_FREE_STRING(_M_data, _M_size, get_allocator());
+ }
+# endif
};
-template<class charT, class Alloc>
-struct __rope_RopeLeaf : public __rope_RopeBase<charT,Alloc> {
- public: // Apparently needed by VC++
- __GC_CONST charT* data; /* Not necessarily 0 terminated. */
- /* The allocated size is */
- /* rounded_up_size(size), except */
- /* in the GC case, in which it */
- /* doesn't matter. */
-};
-
-template<class charT, class Alloc>
-struct __rope_RopeConcatenation : public __rope_RopeBase<charT,Alloc> {
+template<class _CharT, class _Alloc>
+struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
public:
- __rope_RopeBase<charT,Alloc>* left;
- __rope_RopeBase<charT,Alloc>* right;
+ _Rope_RopeRep<_CharT,_Alloc>* _M_left;
+ _Rope_RopeRep<_CharT,_Alloc>* _M_right;
+ typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
+ _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
+ _Rope_RopeRep<_CharT,_Alloc>* __r,
+ allocator_type __a)
+ : _M_left(__l), _M_right(__r)
+ , _Rope_RopeRep<_CharT,_Alloc>(
+ _S_concat, max(__l->_M_depth, __r->_M_depth) + 1, false,
+ __l->_M_size + __r->_M_size, __a)
+ {}
+# ifndef __GC
+ ~_Rope_RopeConcatenation() {
+ _M_free_c_string();
+ _M_left->_M_unref_nonnil();
+ _M_right->_M_unref_nonnil();
+ }
+# endif
};
-template<class charT, class Alloc>
-struct __rope_RopeFunction : public __rope_RopeBase<charT,Alloc> {
+template<class _CharT, class _Alloc>
+struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
public:
- char_producer<charT>* fn;
+ char_producer<_CharT>* _M_fn;
# ifndef __GC
- bool delete_when_done; // Char_producer is owned by the
- // rope and should be explicitly
- // deleted when the rope becomes
- // inaccessible.
+ bool _M_delete_when_done; // Char_producer is owned by the
+ // rope and should be explicitly
+ // deleted when the rope becomes
+ // inaccessible.
# else
// In the GC case, we either register the rope for
// finalization, or not. Thus the field is unnecessary;
// the information is stored in the collector data structures.
+ // We do need a finalization procedure to be invoked by the
+ // collector.
+ static void _S_fn_finalization_proc(void * __tree, void *) {
+ delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
+ }
# endif
+ typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
+ _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
+ bool __d, allocator_type __a)
+ : _M_fn(__f)
+# ifndef __GC
+ , _M_delete_when_done(__d)
+# endif
+ , _Rope_RopeRep<_CharT,_Alloc>(_S_function, 0, true, __size, __a) {
+ __stl_assert(__size > 0);
+# ifdef __GC
+ if (__d) {
+ GC_REGISTER_FINALIZER(
+ this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
+ }
+# endif
+ }
+# ifndef __GC
+ ~_Rope_RopeFunction() {
+ _M_free_c_string();
+ if (_M_delete_when_done) {
+ delete _M_fn;
+ }
+ }
+# endif
};
// Substring results are usually represented using just
// concatenation nodes. But in the case of very long flat ropes
// or ropes with a functional representation that isn't practical.
-// In that case, we represent the result as a special case of
+// In that case, we represent the result as a special case of
// RopeFunction, whose char_producer points back to the rope itself.
// In all cases except repeated substring operations and
-// deallocation, we treat the result as a RopeFunction.
-template<class charT, class Alloc>
-struct __rope_RopeSubstring: public __rope_RopeFunction<charT,Alloc>,
- public char_producer<charT> {
+// deallocation, we treat the __result as a RopeFunction.
+template<class _CharT, class _Alloc>
+struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
+ public char_producer<_CharT> {
public:
- __rope_RopeBase<charT,Alloc> * base; // not 0
- size_t start;
- virtual ~__rope_RopeSubstring() {}
- virtual void operator()(size_t start_pos, size_t req_len,
- charT *buffer) {
- switch(base -> tag) {
- case function:
- case substringfn:
- {
- char_producer<charT> *fn =
- ((__rope_RopeFunction<charT,Alloc> *)base) -> fn;
- __stl_assert(start_pos + req_len <= size);
- __stl_assert(start + size <= base -> size);
- (*fn)(start_pos + start, req_len, buffer);
- }
- break;
- case leaf:
- {
- __GC_CONST charT * s =
- ((__rope_RopeLeaf<charT,Alloc> *)base) -> data;
- uninitialized_copy_n(s + start_pos + start, req_len,
- buffer);
- }
- break;
- default:
- __stl_assert(false);
- }
+ // XXX this whole class should be rewritten.
+ _Rope_RopeRep<_CharT,_Alloc>* _M_base; // not 0
+ size_t _M_start;
+ virtual void operator()(size_t __start_pos, size_t __req_len,
+ _CharT* __buffer) {
+ switch(_M_base->_M_tag) {
+ case _S_function:
+ case _S_substringfn:
+ {
+ char_producer<_CharT>* __fn =
+ ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
+ __stl_assert(__start_pos + __req_len <= _M_size);
+ __stl_assert(_M_start + _M_size <= _M_base->_M_size);
+ (*__fn)(__start_pos + _M_start, __req_len, __buffer);
+ }
+ break;
+ case _S_leaf:
+ {
+ __GC_CONST _CharT* __s =
+ ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
+ uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
+ __buffer);
+ }
+ break;
+ default:
+ __stl_assert(false);
+ }
}
- __rope_RopeSubstring(__rope_RopeBase<charT,Alloc> * b, size_t s, size_t l) :
- base(b), start(s) {
+ typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
+ _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
+ size_t __l, allocator_type __a)
+ : _M_base(__b)
+ , _M_start(__s)
+ , _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a)
+ {
+ __stl_assert(__l > 0);
+ __stl_assert(__s + __l <= __b->_M_size);
# ifndef __GC
- refcount = 1;
- init_refcount_lock();
- base -> ref_nonnil();
+ _M_base->_M_ref_nonnil();
# endif
- size = l;
- tag = substringfn;
- depth = 0;
- c_string = 0;
- fn = this;
+ _M_tag = _S_substringfn;
}
+ virtual ~_Rope_RopeSubstring()
+ {
+# ifndef __GC
+ _M_base->_M_unref_nonnil();
+ // _M_free_c_string(); -- done by parent class
+# endif
+ }
};
-// Self-destructing pointers to RopeBase.
+// Self-destructing pointers to Rope_rep.
// These are not conventional smart pointers. Their
// only purpose in life is to ensure that unref is called
// on the pointer either at normal exit or if an exception
@@ -504,21 +724,22 @@ struct __rope_RopeSubstring: public __rope_RopeFunction<charT,Alloc>,
// the number of potentially expensive reference count
// updates.)
#ifndef __GC
- template<class charT, class Alloc>
- struct __rope_self_destruct_ptr {
- __rope_RopeBase<charT,Alloc> * ptr;
- ~__rope_self_destruct_ptr() { __rope_RopeBase<charT,Alloc>::unref(ptr); }
+ template<class _CharT, class _Alloc>
+ struct _Rope_self_destruct_ptr {
+ _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
+ ~_Rope_self_destruct_ptr()
+ { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
# ifdef __STL_USE_EXCEPTIONS
- __rope_self_destruct_ptr() : ptr(0) {};
+ _Rope_self_destruct_ptr() : _M_ptr(0) {};
# else
- __rope_self_destruct_ptr() {};
+ _Rope_self_destruct_ptr() {};
# endif
- __rope_self_destruct_ptr(__rope_RopeBase<charT,Alloc> * p) : ptr(p) {}
- __rope_RopeBase<charT,Alloc> & operator*() { return *ptr; }
- __rope_RopeBase<charT,Alloc> * operator->() { return ptr; }
- operator __rope_RopeBase<charT,Alloc> *() { return ptr; }
- __rope_self_destruct_ptr & operator= (__rope_RopeBase<charT,Alloc> * x)
- { ptr = x; return *this; }
+ _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
+ _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
+ _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
+ operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
+ _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
+ { _M_ptr = __x; return *this; }
};
#endif
@@ -527,73 +748,100 @@ struct __rope_RopeSubstring: public __rope_RopeFunction<charT,Alloc>,
// return an actual reference since assignment requires extra
// work. And we would get into the same problems as with the
// CD2 version of basic_string.
-template<class charT, class Alloc>
-class __rope_charT_ref_proxy {
- friend class rope<charT,Alloc>;
- friend class __rope_iterator<charT,Alloc>;
- friend class __rope_charT_ptr_proxy<charT,Alloc>;
+template<class _CharT, class _Alloc>
+class _Rope_char_ref_proxy {
+ friend class rope<_CharT,_Alloc>;
+ friend class _Rope_iterator<_CharT,_Alloc>;
+ friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
# ifdef __GC
- typedef __rope_RopeBase<charT,Alloc> * self_destruct_ptr;
+ typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
# else
- typedef __rope_self_destruct_ptr<charT,Alloc> self_destruct_ptr;
+ typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
# endif
- typedef __rope_RopeBase<charT,Alloc> RopeBase;
- typedef rope<charT,Alloc> my_rope;
- size_t pos;
- charT current;
- bool current_valid;
- my_rope * root; // The whole rope.
+ typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
+ typedef rope<_CharT,_Alloc> _My_rope;
+ size_t _M_pos;
+ _CharT _M_current;
+ bool _M_current_valid;
+ _My_rope* _M_root; // The whole rope.
public:
- __rope_charT_ref_proxy(my_rope * r, size_t p) :
- pos(p), root(r), current_valid(false) {}
- __rope_charT_ref_proxy(my_rope * r, size_t p,
- charT c) :
- pos(p), root(r), current(c), current_valid(true) {}
- operator charT () const;
- __rope_charT_ref_proxy& operator= (charT c);
- __rope_charT_ptr_proxy<charT,Alloc> operator& () const;
- __rope_charT_ref_proxy& operator= (const __rope_charT_ref_proxy& c) {
- return operator=((charT)c);
+ _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
+ _M_pos(__p), _M_root(__r), _M_current_valid(false) {}
+ _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x) :
+ _M_pos(__x._M_pos), _M_root(__x._M_root), _M_current_valid(false) {}
+ // Don't preserve cache if the reference can outlive the
+ // expression. We claim that's not possible without calling
+ // a copy constructor or generating reference to a proxy
+ // reference. We declare the latter to have undefined semantics.
+ _Rope_char_ref_proxy(_My_rope* __r, size_t __p,
+ _CharT __c) :
+ _M_pos(__p), _M_root(__r), _M_current(__c), _M_current_valid(true) {}
+ inline operator _CharT () const;
+ _Rope_char_ref_proxy& operator= (_CharT __c);
+ _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
+ _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
+ return operator=((_CharT)__c);
}
};
-template<class charT, class Alloc>
-class __rope_charT_ptr_proxy {
- friend class __rope_charT_ref_proxy<charT,Alloc>;
- size_t pos;
- charT current;
- bool current_valid;
- rope<charT,Alloc> * root; // The whole rope.
+#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
+ template<class _CharT, class __Alloc>
+ inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
+ _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
+ _CharT __tmp = __a;
+ __a = __b;
+ __b = __tmp;
+ }
+#else
+// There is no really acceptable way to handle this. The default
+// definition of swap doesn't work for proxy references.
+// It can't really be made to work, even with ugly hacks, since
+// the only unusual operation it uses is the copy constructor, which
+// is needed for other purposes. We provide a macro for
+// full specializations, and instantiate the most common case.
+# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
+ inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
+ _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
+ _CharT __tmp = __a; \
+ __a = __b; \
+ __b = __tmp; \
+ }
+
+_ROPE_SWAP_SPECIALIZATION(char,__STL_DEFAULT_ALLOCATOR(char))
+
+#endif /* !__STL_FUNCTION_TMPL_PARTIAL_ORDER */
+
+template<class _CharT, class _Alloc>
+class _Rope_char_ptr_proxy {
+ // XXX this class should be rewritten.
+ friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
+ size_t _M_pos;
+ rope<_CharT,_Alloc>* _M_root; // The whole rope.
public:
- __rope_charT_ptr_proxy(const __rope_charT_ref_proxy<charT,Alloc> & x) :
- pos(x.pos), root(x.root), current_valid(x.current_valid),
- current(x.current) {}
- __rope_charT_ptr_proxy(const __rope_charT_ptr_proxy & x) :
- pos(x.pos), root(x.root), current_valid(x.current_valid),
- current(x.current) {}
- __rope_charT_ptr_proxy() {}
- __rope_charT_ptr_proxy(charT * x) : root(0), pos(0) {
- __stl_assert(0 == x);
+ _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
+ : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
+ _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
+ : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
+ _Rope_char_ptr_proxy() {}
+ _Rope_char_ptr_proxy(_CharT* __x) : _M_root(0), _M_pos(0) {
+ __stl_assert(0 == __x);
}
- __rope_charT_ptr_proxy& operator= (const __rope_charT_ptr_proxy& x) {
- pos = x.pos;
- current = x.current;
- current_valid = x.current_valid;
- root = x.root;
- return *this;
+ _Rope_char_ptr_proxy&
+ operator= (const _Rope_char_ptr_proxy& __x) {
+ _M_pos = __x._M_pos;
+ _M_root = __x._M_root;
+ return *this;
}
- friend bool operator== __STL_NULL_TMPL_ARGS
- (const __rope_charT_ptr_proxy<charT,Alloc> & x,
- const __rope_charT_ptr_proxy<charT,Alloc> & y);
- __rope_charT_ref_proxy<charT,Alloc> operator *() const {
- if (current_valid) {
- return __rope_charT_ref_proxy<charT,Alloc>(root, pos, current);
- } else {
- return __rope_charT_ref_proxy<charT,Alloc>(root, pos);
- }
+ friend bool operator== __STL_NULL_TMPL_ARGS
+ (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
+ const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y);
+
+ _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
+ return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
}
};
+
// Rope iterators:
// Unlike in the C version, we cache only part of the stack
// for rope iterators, since they must be efficiently copyable.
@@ -607,294 +855,297 @@ class __rope_charT_ptr_proxy {
#pragma set woff 1375
#endif
-template<class charT, class Alloc>
-class __rope_iterator_base:
- public random_access_iterator<charT, ptrdiff_t> {
- friend class rope<charT, Alloc>;
+template<class _CharT, class _Alloc>
+class _Rope_iterator_base
+ : public random_access_iterator<_CharT, ptrdiff_t> {
+ friend class rope<_CharT,_Alloc>;
public:
- typedef __rope_RopeBase<charT,Alloc> RopeBase;
- // Borland doesnt want this to be protected.
+ typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
+ // Borland doesnt want this to be protected.
protected:
- enum { path_cache_len = 4 }; // Must be <= 9.
- enum { iterator_buf_len = 15 };
- size_t current_pos;
- RopeBase * root; // The whole rope.
- size_t leaf_pos; // Starting position for current leaf
- __GC_CONST charT * buf_start;
- // Buffer possibly
- // containing current char.
- __GC_CONST charT * buf_ptr;
- // Pointer to current char in buffer.
- // != 0 ==> buffer valid.
- __GC_CONST charT * buf_end;
- // One past last valid char in buffer.
+ enum { _S_path_cache_len = 4 }; // Must be <= 9.
+ enum { _S_iterator_buf_len = 15 };
+ size_t _M_current_pos;
+ _RopeRep* _M_root; // The whole rope.
+ size_t _M_leaf_pos; // Starting position for current leaf
+ __GC_CONST _CharT* _M_buf_start;
+ // Buffer possibly
+ // containing current char.
+ __GC_CONST _CharT* _M_buf_ptr;
+ // Pointer to current char in buffer.
+ // != 0 ==> buffer valid.
+ __GC_CONST _CharT* _M_buf_end;
+ // One past __last valid char in buffer.
// What follows is the path cache. We go out of our
// way to make this compact.
// Path_end contains the bottom section of the path from
// the root to the current leaf.
- const RopeBase * path_end[path_cache_len];
- int leaf_index; // Last valid pos in path_end;
- // path_end[0] ... path_end[leaf_index-1]
- // point to concatenation nodes.
- unsigned char path_directions;
- // (path_directions >> i) & 1 is 1
- // iff we got from path_end[leaf_index - i - 1]
- // to path_end[leaf_index - i] by going to the
- // right. Assumes path_cache_len <= 9.
- charT tmp_buf[iterator_buf_len];
- // Short buffer for surrounding chars.
- // This is useful primarily for
- // RopeFunctions. We put the buffer
- // here to avoid locking in the
- // multithreaded case.
+ const _RopeRep* _M_path_end[_S_path_cache_len];
+ int _M_leaf_index; // Last valid __pos in path_end;
+ // _M_path_end[0] ... _M_path_end[leaf_index-1]
+ // point to concatenation nodes.
+ unsigned char _M_path_directions;
+ // (path_directions >> __i) & 1 is 1
+ // iff we got from _M_path_end[leaf_index - __i - 1]
+ // to _M_path_end[leaf_index - __i] by going to the
+ // __right. Assumes path_cache_len <= 9.
+ _CharT _M_tmp_buf[_S_iterator_buf_len];
+ // Short buffer for surrounding chars.
+ // This is useful primarily for
+ // RopeFunctions. We put the buffer
+ // here to avoid locking in the
+ // multithreaded case.
// The cached path is generally assumed to be valid
// only if the buffer is valid.
- static void setbuf(__rope_iterator_base &x);
- // Set buffer contents given
- // path cache.
- static void setcache(__rope_iterator_base &x);
- // Set buffer contents and
- // path cache.
- static void setcache_for_incr(__rope_iterator_base &x);
- // As above, but assumes path
- // cache is valid for previous posn.
- __rope_iterator_base() {}
- __rope_iterator_base(RopeBase * root, size_t pos):
- root(root), current_pos(pos), buf_ptr(0) {}
- __rope_iterator_base(const __rope_iterator_base& x) {
- if (0 != x.buf_ptr) {
- *this = x;
- } else {
- current_pos = x.current_pos;
- root = x.root;
- buf_ptr = 0;
- }
- }
- void incr(size_t n);
- void decr(size_t n);
+ static void _S_setbuf(_Rope_iterator_base& __x);
+ // Set buffer contents given
+ // path cache.
+ static void _S_setcache(_Rope_iterator_base& __x);
+ // Set buffer contents and
+ // path cache.
+ static void _S_setcache_for_incr(_Rope_iterator_base& __x);
+ // As above, but assumes path
+ // cache is valid for previous posn.
+ _Rope_iterator_base() {}
+ _Rope_iterator_base(_RopeRep* __root, size_t __pos)
+ : _M_root(__root), _M_current_pos(__pos), _M_buf_ptr(0) {}
+ void _M_incr(size_t __n);
+ void _M_decr(size_t __n);
public:
- size_t index() const { return current_pos; }
+ size_t index() const { return _M_current_pos; }
+ _Rope_iterator_base(const _Rope_iterator_base& __x) {
+ if (0 != __x._M_buf_ptr) {
+ *this = __x;
+ } else {
+ _M_current_pos = __x._M_current_pos;
+ _M_root = __x._M_root;
+ _M_buf_ptr = 0;
+ }
+ }
};
-template<class charT, class Alloc> class __rope_iterator;
+template<class _CharT, class _Alloc> class _Rope_iterator;
-template<class charT, class Alloc>
-class __rope_const_iterator : public __rope_iterator_base<charT,Alloc> {
- friend class rope<charT,Alloc>;
+template<class _CharT, class _Alloc>
+class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
+ friend class rope<_CharT,_Alloc>;
protected:
- __rope_const_iterator(const RopeBase * root, size_t pos):
- __rope_iterator_base<charT,Alloc>(
- const_cast<RopeBase *>(root), pos)
- // Only nonconst iterators modify root ref count
+ _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
+ _Rope_iterator_base<_CharT,_Alloc>(
+ const_cast<_RopeRep*>(__root), __pos)
+ // Only nonconst iterators modify root ref count
{}
public:
- typedef charT reference; // Really a value. Returning a reference
- // Would be a mess, since it would have
- // to be included in refcount.
- typedef const charT* pointer;
+ typedef _CharT reference; // Really a value. Returning a reference
+ // Would be a mess, since it would have
+ // to be included in refcount.
+ typedef const _CharT* pointer;
public:
- __rope_const_iterator() {};
- __rope_const_iterator(const __rope_const_iterator & x) :
- __rope_iterator_base<charT,Alloc>(x) { }
- __rope_const_iterator(const __rope_iterator<charT,Alloc> & x);
- __rope_const_iterator(const rope<charT,Alloc> &r, size_t pos) :
- __rope_iterator_base<charT,Alloc>(r.tree_ptr, pos) {}
- __rope_const_iterator& operator= (const __rope_const_iterator & x) {
- if (0 != x.buf_ptr) {
- *this = x;
- } else {
- current_pos = x.current_pos;
- root = x.root;
- buf_ptr = 0;
- }
- return(*this);
+ _Rope_const_iterator() {};
+ _Rope_const_iterator(const _Rope_const_iterator& __x) :
+ _Rope_iterator_base<_CharT,_Alloc>(__x) { }
+ _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
+ _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
+ _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
+ _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
+ if (0 != __x._M_buf_ptr) {
+ *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
+ } else {
+ _M_current_pos = __x._M_current_pos;
+ _M_root = __x._M_root;
+ _M_buf_ptr = 0;
+ }
+ return(*this);
}
reference operator*() {
- if (0 == buf_ptr) setcache(*this);
- return *buf_ptr;
+ if (0 == _M_buf_ptr) _S_setcache(*this);
+ return *_M_buf_ptr;
}
- __rope_const_iterator& operator++() {
- __GC_CONST charT * next;
- if (0 != buf_ptr && (next = buf_ptr + 1) < buf_end) {
- buf_ptr = next;
- ++current_pos;
- } else {
- incr(1);
- }
- return *this;
+ _Rope_const_iterator& operator++() {
+ __GC_CONST _CharT* __next;
+ if (0 != _M_buf_ptr && (__next = _M_buf_ptr + 1) < _M_buf_end) {
+ _M_buf_ptr = __next;
+ ++_M_current_pos;
+ } else {
+ _M_incr(1);
+ }
+ return *this;
}
- __rope_const_iterator& operator+=(ptrdiff_t n) {
- if (n >= 0) {
- incr(n);
- } else {
- decr(-n);
- }
- return *this;
+ _Rope_const_iterator& operator+=(ptrdiff_t __n) {
+ if (__n >= 0) {
+ _M_incr(__n);
+ } else {
+ _M_decr(-__n);
+ }
+ return *this;
}
- __rope_const_iterator& operator--() {
- decr(1);
- return *this;
+ _Rope_const_iterator& operator--() {
+ _M_decr(1);
+ return *this;
}
- __rope_const_iterator& operator-=(ptrdiff_t n) {
- if (n >= 0) {
- decr(n);
- } else {
- incr(-n);
- }
- return *this;
+ _Rope_const_iterator& operator-=(ptrdiff_t __n) {
+ if (__n >= 0) {
+ _M_decr(__n);
+ } else {
+ _M_incr(-__n);
+ }
+ return *this;
}
- __rope_const_iterator operator++(int) {
- size_t old_pos = current_pos;
- incr(1);
- return __rope_const_iterator<charT,Alloc>(root, old_pos);
- // This makes a subsequent dereference expensive.
- // Perhaps we should instead copy the iterator
- // if it has a valid cache?
+ _Rope_const_iterator operator++(int) {
+ size_t __old_pos = _M_current_pos;
+ _M_incr(1);
+ return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
+ // This makes a subsequent dereference expensive.
+ // Perhaps we should instead copy the iterator
+ // if it has a valid cache?
}
- __rope_const_iterator operator--(int) {
- size_t old_pos = current_pos;
- decr(1);
- return __rope_const_iterator<charT,Alloc>(root, old_pos);
+ _Rope_const_iterator operator--(int) {
+ size_t __old_pos = _M_current_pos;
+ _M_decr(1);
+ return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
}
- friend __rope_const_iterator<charT,Alloc> operator- __STL_NULL_TMPL_ARGS
- (const __rope_const_iterator<charT,Alloc> & x,
- ptrdiff_t n);
- friend __rope_const_iterator<charT,Alloc> operator+ __STL_NULL_TMPL_ARGS
- (const __rope_const_iterator<charT,Alloc> & x,
- ptrdiff_t n);
- friend __rope_const_iterator<charT,Alloc> operator+ __STL_NULL_TMPL_ARGS
- (ptrdiff_t n,
- const __rope_const_iterator<charT,Alloc> & x);
- reference operator[](size_t n) {
- return rope<charT,Alloc>::fetch(root, current_pos + n);
+ friend _Rope_const_iterator<_CharT,_Alloc> operator- __STL_NULL_TMPL_ARGS
+ (const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ ptrdiff_t __n);
+ friend _Rope_const_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
+ (const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ ptrdiff_t __n);
+ friend _Rope_const_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
+ (ptrdiff_t __n,
+ const _Rope_const_iterator<_CharT,_Alloc>& __x);
+ reference operator[](size_t __n) {
+ return rope<_CharT,_Alloc>::_S_fetch(_M_root, _M_current_pos + __n);
}
friend bool operator== __STL_NULL_TMPL_ARGS
- (const __rope_const_iterator<charT,Alloc> & x,
- const __rope_const_iterator<charT,Alloc> & y);
+ (const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ const _Rope_const_iterator<_CharT,_Alloc>& __y);
friend bool operator< __STL_NULL_TMPL_ARGS
- (const __rope_const_iterator<charT,Alloc> & x,
- const __rope_const_iterator<charT,Alloc> & y);
+ (const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ const _Rope_const_iterator<_CharT,_Alloc>& __y);
friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
- (const __rope_const_iterator<charT,Alloc> & x,
- const __rope_const_iterator<charT,Alloc> & y);
+ (const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ const _Rope_const_iterator<_CharT,_Alloc>& __y);
};
-template<class charT, class Alloc>
-class __rope_iterator : public __rope_iterator_base<charT,Alloc> {
- friend class rope<charT,Alloc>;
+template<class _CharT, class _Alloc>
+class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
+ friend class rope<_CharT,_Alloc>;
protected:
- rope<charT,Alloc> * root_rope;
- // root is treated as a cached version of this,
- // and is used to detect changes to the underlying
- // rope.
- // Root is included in the reference count.
- // This is necessary so that we can detect changes reliably.
- // Unfortunately, it requires careful bookkeeping for the
- // nonGC case.
- __rope_iterator(rope<charT,Alloc> * r, size_t pos):
- __rope_iterator_base<charT,Alloc>(r -> tree_ptr, pos),
- root_rope(r) {
- RopeBase::ref(root);
- }
- void check();
+ rope<_CharT,_Alloc>* _M_root_rope;
+ // root is treated as a cached version of this,
+ // and is used to detect changes to the underlying
+ // rope.
+ // Root is included in the reference count.
+ // This is necessary so that we can detect changes reliably.
+ // Unfortunately, it requires careful bookkeeping for the
+ // nonGC case.
+ _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
+ : _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr, __pos),
+ _M_root_rope(__r)
+ { _RopeRep::_S_ref(_M_root); }
+
+ void _M_check();
public:
- typedef __rope_charT_ref_proxy<charT,Alloc> reference;
- typedef __rope_charT_ref_proxy<charT,Alloc>* pointer;
+ typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
+ typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
public:
- rope<charT,Alloc>& container() { return *root_rope; }
- __rope_iterator() {
- root = 0; // Needed for reference counting.
+ rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
+ _Rope_iterator() {
+ _M_root = 0; // Needed for reference counting.
};
- __rope_iterator(const __rope_iterator & x) :
- __rope_iterator_base<charT,Alloc>(x) {
- root_rope = x.root_rope;
- RopeBase::ref(root);
+ _Rope_iterator(const _Rope_iterator& __x) :
+ _Rope_iterator_base<_CharT,_Alloc>(__x) {
+ _M_root_rope = __x._M_root_rope;
+ _RopeRep::_S_ref(_M_root);
}
- __rope_iterator(rope<charT,Alloc>& r, size_t pos);
- ~__rope_iterator() {
- RopeBase::unref(root);
+ _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
+ ~_Rope_iterator() {
+ _RopeRep::_S_unref(_M_root);
}
- __rope_iterator& operator= (const __rope_iterator & x) {
- RopeBase *old = root;
-
- RopeBase::ref(x.root);
- if (0 != x.buf_ptr) {
- *this = x;
- } else {
- current_pos = x.current_pos;
- root = x.root;
- root_rope = x.root_rope;
- buf_ptr = 0;
- }
- RopeBase::unref(old);
- return(*this);
+ _Rope_iterator& operator= (const _Rope_iterator& __x) {
+ _RopeRep* __old = _M_root;
+
+ _RopeRep::_S_ref(__x._M_root);
+ if (0 != __x._M_buf_ptr) {
+ _M_root_rope = __x._M_root_rope;
+ *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
+ } else {
+ _M_current_pos = __x._M_current_pos;
+ _M_root = __x._M_root;
+ _M_root_rope = __x._M_root_rope;
+ _M_buf_ptr = 0;
+ }
+ _RopeRep::_S_unref(__old);
+ return(*this);
}
reference operator*() {
- check();
- if (0 == buf_ptr) {
- return __rope_charT_ref_proxy<charT,Alloc>(root_rope, current_pos);
- } else {
- return __rope_charT_ref_proxy<charT,Alloc>(root_rope,
- current_pos, *buf_ptr);
- }
+ _M_check();
+ if (0 == _M_buf_ptr) {
+ return _Rope_char_ref_proxy<_CharT,_Alloc>(
+ _M_root_rope, _M_current_pos);
+ } else {
+ return _Rope_char_ref_proxy<_CharT,_Alloc>(
+ _M_root_rope, _M_current_pos, *_M_buf_ptr);
+ }
}
- __rope_iterator& operator++() {
- incr(1);
- return *this;
+ _Rope_iterator& operator++() {
+ _M_incr(1);
+ return *this;
}
- __rope_iterator& operator+=(difference_type n) {
- if (n >= 0) {
- incr(n);
- } else {
- decr(-n);
- }
- return *this;
+ _Rope_iterator& operator+=(difference_type __n) {
+ if (__n >= 0) {
+ _M_incr(__n);
+ } else {
+ _M_decr(-__n);
+ }
+ return *this;
}
- __rope_iterator& operator--() {
- decr(1);
- return *this;
+ _Rope_iterator& operator--() {
+ _M_decr(1);
+ return *this;
}
- __rope_iterator& operator-=(difference_type n) {
- if (n >= 0) {
- decr(n);
- } else {
- incr(-n);
- }
- return *this;
+ _Rope_iterator& operator-=(difference_type __n) {
+ if (__n >= 0) {
+ _M_decr(__n);
+ } else {
+ _M_incr(-__n);
+ }
+ return *this;
}
- __rope_iterator operator++(int) {
- size_t old_pos = current_pos;
- incr(1);
- return __rope_iterator<charT,Alloc>(root_rope, old_pos);
+ _Rope_iterator operator++(int) {
+ size_t __old_pos = _M_current_pos;
+ _M_incr(1);
+ return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
}
- __rope_iterator operator--(int) {
- size_t old_pos = current_pos;
- decr(1);
- return __rope_iterator<charT,Alloc>(root_rope, old_pos);
+ _Rope_iterator operator--(int) {
+ size_t __old_pos = _M_current_pos;
+ _M_decr(1);
+ return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
}
- reference operator[](ptrdiff_t n) {
- return __rope_charT_ref_proxy<charT,Alloc>(root_rope, current_pos + n);
+ reference operator[](ptrdiff_t __n) {
+ return _Rope_char_ref_proxy<_CharT,_Alloc>(
+ _M_root_rope, _M_current_pos + __n);
}
friend bool operator== __STL_NULL_TMPL_ARGS
- (const __rope_iterator<charT,Alloc> & x,
- const __rope_iterator<charT,Alloc> & y);
+ (const _Rope_iterator<_CharT,_Alloc>& __x,
+ const _Rope_iterator<_CharT,_Alloc>& __y);
friend bool operator< __STL_NULL_TMPL_ARGS
- (const __rope_iterator<charT,Alloc> & x,
- const __rope_iterator<charT,Alloc> & y);
+ (const _Rope_iterator<_CharT,_Alloc>& __x,
+ const _Rope_iterator<_CharT,_Alloc>& __y);
friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
- (const __rope_iterator<charT,Alloc> & x,
- const __rope_iterator<charT,Alloc> & y);
- friend __rope_iterator<charT,Alloc> operator- __STL_NULL_TMPL_ARGS
- (const __rope_iterator<charT,Alloc> & x,
- ptrdiff_t n);
- friend __rope_iterator<charT,Alloc> operator+ __STL_NULL_TMPL_ARGS
- (const __rope_iterator<charT,Alloc> & x,
- ptrdiff_t n);
- friend __rope_iterator<charT,Alloc> operator+ __STL_NULL_TMPL_ARGS
- (ptrdiff_t n,
- const __rope_iterator<charT,Alloc> & x);
+ (const _Rope_iterator<_CharT,_Alloc>& __x,
+ const _Rope_iterator<_CharT,_Alloc>& __y);
+ friend _Rope_iterator<_CharT,_Alloc> operator- __STL_NULL_TMPL_ARGS
+ (const _Rope_iterator<_CharT,_Alloc>& __x,
+ ptrdiff_t __n);
+ friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
+ (const _Rope_iterator<_CharT,_Alloc>& __x,
+ ptrdiff_t __n);
+ friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
+ (ptrdiff_t __n,
+ const _Rope_iterator<_CharT,_Alloc>& __x);
};
@@ -902,1199 +1153,1376 @@ class __rope_iterator : public __rope_iterator_base<charT,Alloc> {
#pragma reset woff 1375
#endif
-template <class charT, class Alloc>
-class rope {
+// The rope base class encapsulates
+// the differences between SGI-style allocators and standard-conforming
+// allocators.
+
+#ifdef __STL_USE_STD_ALLOCATORS
+
+// Base class for ordinary allocators.
+template <class _CharT, class _Allocator, bool _IsStatic>
+class _Rope_alloc_base {
+public:
+ typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
+ typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
+ allocator_type;
+ allocator_type get_allocator() const { return _M_data_allocator; }
+ _Rope_alloc_base(_RopeRep *__t, const allocator_type& __a)
+ : _M_tree_ptr(__t), _M_data_allocator(__a) {}
+ _Rope_alloc_base(const allocator_type& __a)
+ : _M_data_allocator(__a) {}
+
+protected:
+ // The only data members of a rope:
+ allocator_type _M_data_allocator;
+ _RopeRep* _M_tree_ptr;
+
+# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
+ typedef typename \
+ _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
+ _Tp* __name##_allocate(size_t __n) const \
+ { return __name##Allocator(_M_data_allocator).allocate(__n); } \
+ void __name##_deallocate(_Tp *__p, size_t __n) const \
+ { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
+ __ROPE_DEFINE_ALLOCS(_Allocator)
+# undef __ROPE_DEFINE_ALLOC
+};
+
+// Specialization for allocators that have the property that we don't
+// actually have to store an allocator object.
+template <class _CharT, class _Allocator>
+class _Rope_alloc_base<_CharT,_Allocator,true> {
+public:
+ typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
+ typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
+ allocator_type;
+ allocator_type get_allocator() const { return allocator_type(); }
+ _Rope_alloc_base(_RopeRep *__t, const allocator_type&)
+ : _M_tree_ptr(__t) {}
+ _Rope_alloc_base(const allocator_type&) {}
+
+protected:
+ // The only data member of a rope:
+ _RopeRep *_M_tree_ptr;
+
+# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
+ typedef typename \
+ _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
+ typedef typename \
+ _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
+ static _Tp* __name##_allocate(size_t __n) \
+ { return __name##Alloc::allocate(__n); } \
+ static void __name##_deallocate(_Tp *__p, size_t __n) \
+ { __name##Alloc::deallocate(__p, __n); }
+ __ROPE_DEFINE_ALLOCS(_Allocator)
+# undef __ROPE_DEFINE_ALLOC
+};
+
+template <class _CharT, class _Alloc>
+struct _Rope_base
+ : public _Rope_alloc_base<_CharT,_Alloc,
+ _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
+{
+ typedef _Rope_alloc_base<_CharT,_Alloc,
+ _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
+ _Base;
+ typedef typename _Base::allocator_type allocator_type;
+ _Rope_base(_RopeRep* __t, const allocator_type& __a) : _Base(__t, __a) {}
+ _Rope_base(const allocator_type& __a) : _Base(__a) {}
+};
+
+#else /* !__STL_USE_STD_ALLOCATORS */
+
+template <class _CharT, class _Alloc>
+class _Rope_base {
+public:
+ typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
+ typedef _Alloc allocator_type;
+ static allocator_type get_allocator() { return allocator_type(); }
+ _Rope_base(_RopeRep * __t, const allocator_type&) : _M_tree_ptr(__t) {}
+ _Rope_base(const allocator_type&) {}
+
+protected:
+ // The only data member of a rope:
+ _RopeRep* _M_tree_ptr;
+
+# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
+ typedef simple_alloc<_Tp, _Alloc> __name##Alloc; \
+ static _Tp* __name##_allocate(size_t __n) \
+ { return __name##Alloc::allocate(__n); } \
+ static void __name##_deallocate(_Tp *__p, size_t __n) \
+ { __name##Alloc::deallocate(__p, __n); }
+ __ROPE_DEFINE_ALLOCS(_Alloc)
+# undef __ROPE_DEFINE_ALLOC
+};
+
+#endif /* __STL_USE_STD_ALLOCATORS */
+
+
+template <class _CharT, class _Alloc>
+class rope : public _Rope_base<_CharT,_Alloc> {
public:
- typedef charT value_type;
- typedef ptrdiff_t difference_type;
- typedef size_t size_type;
- typedef charT const_reference;
- typedef const charT* const_pointer;
- typedef __rope_iterator<charT,Alloc> iterator;
- typedef __rope_const_iterator<charT,Alloc> const_iterator;
- typedef __rope_charT_ref_proxy<charT,Alloc> reference;
- typedef __rope_charT_ptr_proxy<charT,Alloc> pointer;
-
- friend class __rope_iterator<charT,Alloc>;
- friend class __rope_const_iterator<charT,Alloc>;
- friend struct __rope_RopeBase<charT,Alloc>;
- friend class __rope_iterator_base<charT,Alloc>;
- friend class __rope_charT_ptr_proxy<charT,Alloc>;
- friend class __rope_charT_ref_proxy<charT,Alloc>;
- friend struct __rope_RopeSubstring<charT,Alloc>;
+ typedef _CharT value_type;
+ typedef ptrdiff_t difference_type;
+ typedef size_t size_type;
+ typedef _CharT const_reference;
+ typedef const _CharT* const_pointer;
+ typedef _Rope_iterator<_CharT,_Alloc> iterator;
+ typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
+ typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
+ typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
+
+ friend class _Rope_iterator<_CharT,_Alloc>;
+ friend class _Rope_const_iterator<_CharT,_Alloc>;
+ friend struct _Rope_RopeRep<_CharT,_Alloc>;
+ friend class _Rope_iterator_base<_CharT,_Alloc>;
+ friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
+ friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
+ friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
protected:
- typedef __GC_CONST charT * cstrptr;
+ typedef _Rope_base<_CharT,_Alloc> _Base;
+ typedef typename _Base::allocator_type allocator_type;
+# ifdef __STL_USE_NAMESPACES
+ using _Base::_M_tree_ptr;
+# endif
+ typedef __GC_CONST _CharT* _Cstrptr;
# ifdef __STL_SGI_THREADS
- static cstrptr atomic_swap(cstrptr *p, cstrptr q) {
+ static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
# if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
- return (cstrptr) test_and_set((unsigned long *)p,
- (unsigned long)q);
-# else
- return (cstrptr) __test_and_set((unsigned long *)p,
- (unsigned long)q);
-# endif
+ return (_Cstrptr) test_and_set((unsigned long*)__p,
+ (unsigned long)__q);
+# else
+ return (_Cstrptr) __test_and_set((unsigned long*)__p,
+ (unsigned long)__q);
+# endif
}
# elif defined(__STL_WIN32THREADS)
- static cstrptr atomic_swap(cstrptr *p, cstrptr q) {
- return (cstrptr) InterlockedExchange((LPLONG)p, (LONG)q);
- }
-# elif defined(__STL_PTHREADS)
- // This should be portable, but performance is expected
- // to be quite awful. This really needs platform specific
- // code.
- static pthread_mutex_t swap_lock;
- static cstrptr atomic_swap(cstrptr *p, cstrptr q) {
- pthread_mutex_lock(&swap_lock);
- cstrptr result = *p;
- *p = q;
- pthread_mutex_unlock(&swap_lock);
- return result;
+ static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
+ return (_Cstrptr) InterlockedExchange(
+ (LPLONG)__p, (LONG)__q);
+ }
+# elif defined(__STL_PTHREADS)
+ // This should be portable, but performance is expected
+ // to be quite awful. This really needs platform specific
+ // code.
+ static pthread_mutex_t _S_swap_lock;
+ static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
+ pthread_mutex_lock(&_S_swap_lock);
+ _Cstrptr __result = *__p;
+ *__p = __q;
+ pthread_mutex_unlock(&_S_swap_lock);
+ return __result;
+ }
+# else
+ static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
+ _Cstrptr __result = *__p;
+ *__p = __q;
+ return __result;
}
-# else
- static cstrptr atomic_swap(cstrptr *p, cstrptr q) {
- cstrptr result = *p;
- *p = q;
- return result;
- }
# endif
- static charT empty_c_str[1];
-
- typedef simple_alloc<charT, Alloc> DataAlloc;
- typedef simple_alloc<__rope_RopeConcatenation<charT,Alloc>, Alloc> CAlloc;
- typedef simple_alloc<__rope_RopeLeaf<charT,Alloc>, Alloc> LAlloc;
- typedef simple_alloc<__rope_RopeFunction<charT,Alloc>, Alloc> FAlloc;
- typedef simple_alloc<__rope_RopeSubstring<charT,Alloc>, Alloc> SAlloc;
- static bool is0(charT c) { return c == __eos((charT *)0); }
- enum { copy_max = 23 };
- // For strings shorter than copy_max, we copy to
- // concatenate.
-
- typedef __rope_RopeBase<charT,Alloc> RopeBase;
- typedef __rope_RopeConcatenation<charT,Alloc> RopeConcatenation;
- typedef __rope_RopeLeaf<charT,Alloc> RopeLeaf;
- typedef __rope_RopeFunction<charT,Alloc> RopeFunction;
- typedef __rope_RopeSubstring<charT,Alloc> RopeSubstring;
-
- // The only data member of a rope:
- RopeBase *tree_ptr;
-
- // Retrieve a character at the indicated position.
- static charT fetch(RopeBase * r, size_type pos);
-
-# ifndef __GC
- // Obtain a pointer to the character at the indicated position.
- // The pointer can be used to change the character.
- // If such a pointer cannot be produced, as is frequently the
- // case, 0 is returned instead.
- // (Returns nonzero only if all nodes in the path have a refcount
- // of 1.)
- static charT * fetch_ptr(RopeBase * r, size_type pos);
-# endif
-
- static bool apply_to_pieces(
- // should be template parameter
- __rope_char_consumer<charT>& c,
- const RopeBase * r,
- size_t begin, size_t end);
- // begin and end are assumed to be in range.
-
-# ifndef __GC
- static void unref(RopeBase* t)
- {
- RopeBase::unref(t);
- }
- static void ref(RopeBase* t)
- {
- RopeBase::ref(t);
- }
+ static _CharT _S_empty_c_str[1];
+
+ static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
+ enum { _S_copy_max = 23 };
+ // For strings shorter than _S_copy_max, we copy to
+ // concatenate.
+
+ typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
+ typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
+ typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
+ typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
+ typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
+
+ // Retrieve a character at the indicated position.
+ static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
+
+# ifndef __GC
+ // Obtain a pointer to the character at the indicated position.
+ // The pointer can be used to change the character.
+ // If such a pointer cannot be produced, as is frequently the
+ // case, 0 is returned instead.
+ // (Returns nonzero only if all nodes in the path have a refcount
+ // of 1.)
+ static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
+# endif
+
+ static bool _S_apply_to_pieces(
+ // should be template parameter
+ _Rope_char_consumer<_CharT>& __c,
+ const _RopeRep* __r,
+ size_t __begin, size_t __end);
+ // begin and end are assumed to be in range.
+
+# ifndef __GC
+ static void _S_unref(_RopeRep* __t)
+ {
+ _RopeRep::_S_unref(__t);
+ }
+ static void _S_ref(_RopeRep* __t)
+ {
+ _RopeRep::_S_ref(__t);
+ }
# else /* __GC */
- static void unref(RopeBase* t) {}
- static void ref(RopeBase* t) {}
+ static void _S_unref(_RopeRep*) {}
+ static void _S_ref(_RopeRep*) {}
# endif
# ifdef __GC
- typedef __rope_RopeBase<charT,Alloc> * self_destruct_ptr;
-# else
- typedef __rope_self_destruct_ptr<charT,Alloc> self_destruct_ptr;
-# endif
-
- // Result is counted in refcount.
- static RopeBase * substring(RopeBase * base,
- size_t start, size_t endp1);
-
- static RopeBase * concat_char_iter(RopeBase * r,
- const charT *iter, size_t slen);
- // Concatenate rope and char ptr, copying s.
- // Should really take an arbitrary iterator.
- // Result is counted in refcount.
- static RopeBase * destr_concat_char_iter(RopeBase * r,
- const charT *iter, size_t slen)
- // As above, but one reference to r is about to be
- // destroyed. Thus the pieces may be recycled if all
- // relevent reference counts are 1.
-# ifdef __GC
- // We can't really do anything since refcounts are unavailable.
- { return concat_char_iter(r, iter, slen); }
-# else
- ;
-# endif
-
- static RopeBase * concat(RopeBase *left, RopeBase *right);
- // General concatenation on RopeBase. Result
- // has refcount of 1. Adjusts argument refcounts.
+ typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
+# else
+ typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
+# endif
+
+ // Result is counted in refcount.
+ static _RopeRep* _S_substring(_RopeRep* __base,
+ size_t __start, size_t __endp1);
+
+ static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
+ const _CharT* __iter, size_t __slen);
+ // Concatenate rope and char ptr, copying __s.
+ // Should really take an arbitrary iterator.
+ // Result is counted in refcount.
+ static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
+ const _CharT* __iter, size_t __slen)
+ // As above, but one reference to __r is about to be
+ // destroyed. Thus the pieces may be recycled if all
+ // relevant reference counts are 1.
+# ifdef __GC
+ // We can't really do anything since refcounts are unavailable.
+ { return _S_concat_char_iter(__r, __iter, __slen); }
+# else
+ ;
+# endif
+
+ static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right);
+ // General concatenation on _RopeRep. Result
+ // has refcount of 1. Adjusts argument refcounts.
public:
- void apply_to_pieces( size_t begin, size_t end,
- __rope_char_consumer<charT>& c) const {
- apply_to_pieces(c, tree_ptr, begin, end);
- }
+ void apply_to_pieces( size_t __begin, size_t __end,
+ _Rope_char_consumer<_CharT>& __c) const {
+ _S_apply_to_pieces(__c, _M_tree_ptr, __begin, __end);
+ }
protected:
- static size_t rounded_up_size(size_t n) {
- return RopeBase::rounded_up_size(n);
- }
-
- static size_t allocated_capacity(size_t n) {
- if (__is_basic_char_type((charT *)0)) {
- return rounded_up_size(n) - 1;
- } else {
- return rounded_up_size(n);
- }
- }
-
- // s should really be an arbitrary input iterator.
- // Adds a trailing NULL for basic char types.
- static charT * alloc_copy(const charT *s, size_t size)
- {
- charT * result = DataAlloc::allocate(rounded_up_size(size));
-
- uninitialized_copy_n(s, size, result);
- __cond_store_eos(result[size]);
- return(result);
- }
-
- // Basic constructors for rope tree nodes.
- // These return tree nodes with a 0 reference count.
- static RopeLeaf * RopeLeaf_from_char_ptr(__GC_CONST charT *s,
- size_t size);
- // Takes ownership of its argument.
- // Result has refcount 1.
- // In the nonGC, basic_char_type case it assumes that s
- // is eos-terminated.
- // In the nonGC case, it was allocated from Alloc with
- // rounded_up_size(size).
-
- static RopeLeaf * RopeLeaf_from_unowned_char_ptr(const charT *s,
- size_t size) {
- charT * buf = alloc_copy(s, size);
+ static size_t _S_rounded_up_size(size_t __n) {
+ return _RopeLeaf::_S_rounded_up_size(__n);
+ }
+
+ static size_t _S_allocated_capacity(size_t __n) {
+ if (_S_is_basic_char_type((_CharT*)0)) {
+ return _S_rounded_up_size(__n) - 1;
+ } else {
+ return _S_rounded_up_size(__n);
+ }
+ }
+
+ // Allocate and construct a RopeLeaf using the supplied allocator
+ // Takes ownership of s instead of copying.
+ static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
+ size_t __size, allocator_type __a)
+ {
+# ifdef __STL_USE_STD_ALLOCATORS
+ _RopeLeaf* __space = _LAllocator(__a).allocate(1);
+# else
+ _RopeLeaf* __space = _L_allocate(1);
+# endif
+ return new(__space) _RopeLeaf(__s, __size, __a);
+ }
+
+ static _RopeConcatenation* _S_new_RopeConcatenation(
+ _RopeRep* __left, _RopeRep* __right,
+ allocator_type __a)
+ {
+# ifdef __STL_USE_STD_ALLOCATORS
+ _RopeConcatenation* __space = _CAllocator(__a).allocate(1);
+# else
+ _RopeConcatenation* __space = _C_allocate(1);
+# endif
+ return new(__space) _RopeConcatenation(__left, __right, __a);
+ }
+
+ static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
+ size_t __size, bool __d, allocator_type __a)
+ {
+# ifdef __STL_USE_STD_ALLOCATORS
+ _RopeFunction* __space = _FAllocator(__a).allocate(1);
+# else
+ _RopeFunction* __space = _F_allocate(1);
+# endif
+ return new(__space) _RopeFunction(__f, __size, __d, __a);
+ }
+
+ static _RopeSubstring* _S_new_RopeSubstring(
+ _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
+ size_t __l, allocator_type __a)
+ {
+# ifdef __STL_USE_STD_ALLOCATORS
+ _RopeSubstring* __space = _SAllocator(__a).allocate(1);
+# else
+ _RopeSubstring* __space = _S_allocate(1);
+# endif
+ return new(__space) _RopeSubstring(__b, __s, __l, __a);
+ }
+
+# ifdef __STL_USE_STD_ALLOCATORS
+ static
+ _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
+ size_t __size, allocator_type __a)
+# define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
+ _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a)
+# else
+ static
+ _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr2(const _CharT* __s,
+ size_t __size)
+# define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
+ _S_RopeLeaf_from_unowned_char_ptr2(__s, __size)
+# endif
+ {
+ if (0 == __size) return 0;
+# ifdef __STL_USE_STD_ALLOCATORS
+ _CharT* __buf = __a.allocate(_S_rounded_up_size(__size));
+# else
+ _CharT* __buf = _Data_allocate(_S_rounded_up_size(__size));
+ allocator_type __a = allocator_type();
+# endif
+
+ uninitialized_copy_n(__s, __size, __buf);
+ _S_cond_store_eos(__buf[__size]);
__STL_TRY {
- return RopeLeaf_from_char_ptr(buf, size);
+ return _S_new_RopeLeaf(__buf, __size, __a);
}
- __STL_UNWIND(RopeBase::free_string(buf, size))
- }
-
-
- // Concatenation of nonempty strings.
- // Always builds a concatenation node.
- // Rebalances if the result is too deep.
- // Result has refcount 1.
- // Does not increment left and right ref counts even though
- // they are referenced.
- static RopeBase * tree_concat(RopeBase * left, RopeBase * right);
-
- // Result has refcount 1.
- // If delete_fn is true, then fn is deleted when the rope
- // becomes inaccessible.
- static RopeFunction * RopeFunction_from_fn
- (char_producer<charT> *fn, size_t size,
- bool delete_fn);
-
- // Concatenation helper functions
- static RopeLeaf * leaf_concat_char_iter
- (RopeLeaf * r, const charT * iter, size_t slen);
- // Concatenate by copying leaf.
- // should take an arbitrary iterator
- // result has refcount 1.
-# ifndef __GC
- static RopeLeaf * destr_leaf_concat_char_iter
- (RopeLeaf * r, const charT * iter, size_t slen);
- // A version that potentially clobbers r if r -> refcount == 1.
+ __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, __size, __a))
+ }
+
+
+ // Concatenation of nonempty strings.
+ // Always builds a concatenation node.
+ // Rebalances if the result is too deep.
+ // Result has refcount 1.
+ // Does not increment left and right ref counts even though
+ // they are referenced.
+ static _RopeRep*
+ _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
+
+ // Concatenation helper functions
+ static _RopeLeaf*
+ _S_leaf_concat_char_iter(_RopeLeaf* __r,
+ const _CharT* __iter, size_t __slen);
+ // Concatenate by copying leaf.
+ // should take an arbitrary iterator
+ // result has refcount 1.
+# ifndef __GC
+ static _RopeLeaf* _S_destr_leaf_concat_char_iter
+ (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
+ // A version that potentially clobbers __r if __r->_M_refcount == 1.
# endif
- // A helper function for exponentiating strings.
- // This uses a nonstandard refcount convention.
- // The result has refcount 0.
- struct concat_fn;
- friend struct rope<charT,Alloc>::concat_fn;
-
- struct concat_fn
- : public binary_function<rope<charT,Alloc>, rope<charT,Alloc>,
- rope<charT,Alloc> > {
- rope operator() (const rope& x, const rope& y) {
- return x + y;
- }
- };
-
- friend rope identity_element(concat_fn) { return rope<charT,Alloc>(); }
-
- static size_t char_ptr_len(const charT * s);
- // slightly generalized strlen
-
- rope(RopeBase *t) : tree_ptr(t) { }
-
-
- // Copy r to the CharT buffer.
- // Returns buffer + r -> size.
- // Assumes that buffer is uninitialized.
- static charT * flatten(RopeBase * r, charT * buffer);
-
- // Again, with explicit starting position and length.
- // Assumes that buffer is uninitialized.
- static charT * flatten(RopeBase * r,
- size_t start, size_t len,
- charT * buffer);
-
- static const unsigned long min_len[RopeBase::max_rope_depth + 1];
-
- static bool is_balanced(RopeBase *r)
- { return (r -> size >= min_len[r -> depth]); }
-
- static bool is_almost_balanced(RopeBase *r)
- { return (r -> depth == 0 ||
- r -> size >= min_len[r -> depth - 1]); }
-
- static bool is_roughly_balanced(RopeBase *r)
- { return (r -> depth <= 1 ||
- r -> size >= min_len[r -> depth - 2]); }
-
- // Assumes the result is not empty.
- static RopeBase * concat_and_set_balanced(RopeBase *left,
- RopeBase *right)
- {
- RopeBase * result = concat(left, right);
- if (is_balanced(result)) result -> is_balanced = true;
- return result;
- }
-
- // The basic rebalancing operation. Logically copies the
- // rope. The result has refcount of 1. The client will
- // usually decrement the reference count of r.
- // The result isd within height 2 of balanced by the above
- // definition.
- static RopeBase * balance(RopeBase * r);
-
- // Add all unbalanced subtrees to the forest of balanceed trees.
- // Used only by balance.
- static void add_to_forest(RopeBase *r, RopeBase **forest);
-
- // Add r to forest, assuming r is already balanced.
- static void add_leaf_to_forest(RopeBase *r, RopeBase **forest);
-
- // Print to stdout, exposing structure
- static void dump(RopeBase * r, int indent = 0);
-
- // Return -1, 0, or 1 if x < y, x == y, or x > y resp.
- static int compare(const RopeBase *x, const RopeBase *y);
+ // A helper function for exponentiating strings.
+ // This uses a nonstandard refcount convention.
+ // The result has refcount 0.
+ struct _Concat_fn
+ : public binary_function<rope<_CharT,_Alloc>,
+ rope<_CharT,_Alloc>,
+ rope<_CharT,_Alloc> > {
+ rope operator() (const rope& __x, const rope& __y) {
+ return __x + __y;
+ }
+ };
+
+ // Needed by the call to "power" used to build ropes
+ // consisting of n copies of a character.
+ friend rope identity_element(_Concat_fn)
+ { return rope<_CharT,_Alloc>(); }
+
+ static size_t _S_char_ptr_len(const _CharT* __s);
+ // slightly generalized strlen
+
+ rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
+ : _Base(__t,__a) { }
+
+
+ // Copy __r to the _CharT buffer.
+ // Returns __buffer + __r->_M_size.
+ // Assumes that buffer is uninitialized.
+ static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
+
+ // Again, with explicit starting position and length.
+ // Assumes that buffer is uninitialized.
+ static _CharT* _S_flatten(_RopeRep* __r,
+ size_t __start, size_t __len,
+ _CharT* __buffer);
+
+ static const unsigned long
+ _S_min_len[_RopeRep::_S_max_rope_depth + 1];
+
+ static bool _S_is_balanced(_RopeRep* __r)
+ { return (__r->_M_size >= _S_min_len[__r->_M_depth]); }
+
+ static bool _S_is_almost_balanced(_RopeRep* __r)
+ { return (__r->_M_depth == 0 ||
+ __r->_M_size >= _S_min_len[__r->_M_depth - 1]); }
+
+ static bool _S_is_roughly_balanced(_RopeRep* __r)
+ { return (__r->_M_depth <= 1 ||
+ __r->_M_size >= _S_min_len[__r->_M_depth - 2]); }
+
+ // Assumes the result is not empty.
+ static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
+ _RopeRep* __right)
+ {
+ _RopeRep* __result = _S_concat(__left, __right);
+ if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
+ return __result;
+ }
+
+ // The basic rebalancing operation. Logically copies the
+ // rope. The result has refcount of 1. The client will
+ // usually decrement the reference count of __r.
+ // The result is within height 2 of balanced by the above
+ // definition.
+ static _RopeRep* _S_balance(_RopeRep* __r);
+
+ // Add all unbalanced subtrees to the forest of balanced trees.
+ // Used only by balance.
+ static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
+
+ // Add __r to forest, assuming __r is already balanced.
+ static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
+
+ // Print to stdout, exposing structure
+ static void _S_dump(_RopeRep* __r, int __indent = 0);
+
+ // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
+ static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
public:
- bool empty() const { return 0 == tree_ptr; }
-
- // Comparison member function. This is public only for those
- // clients that need a ternary comparison. Others
- // should use the comparison operators below.
- int compare(const rope &y) const {
- return compare(tree_ptr, y.tree_ptr);
- }
-
- rope(const charT *s)
- {
- size_t len = char_ptr_len(s);
-
- if (0 == len) {
- tree_ptr = 0;
- } else {
- tree_ptr = RopeLeaf_from_unowned_char_ptr(s, len);
-# ifndef __GC
- __stl_assert(1 == tree_ptr -> refcount);
-# endif
- }
- }
-
- rope(const charT *s, size_t len)
- {
- if (0 == len) {
- tree_ptr = 0;
- } else {
- tree_ptr = RopeLeaf_from_unowned_char_ptr(s, len);
- }
- }
-
- rope(const charT *s, charT *e)
- {
- size_t len = e - s;
-
- if (0 == len) {
- tree_ptr = 0;
- } else {
- tree_ptr = RopeLeaf_from_unowned_char_ptr(s, len);
- }
- }
-
- rope(const const_iterator& s, const const_iterator& e)
- {
- tree_ptr = substring(s.root, s.current_pos, e.current_pos);
- }
-
- rope(const iterator& s, const iterator& e)
- {
- tree_ptr = substring(s.root, s.current_pos, e.current_pos);
- }
-
- rope(charT c)
- {
- charT * buf = DataAlloc::allocate(rounded_up_size(1));
-
- construct(buf, c);
- __STL_TRY {
- tree_ptr = RopeLeaf_from_char_ptr(buf, 1);
+ bool empty() const { return 0 == _M_tree_ptr; }
+
+ // Comparison member function. This is public only for those
+ // clients that need a ternary comparison. Others
+ // should use the comparison operators below.
+ int compare(const rope& __y) const {
+ return _S_compare(_M_tree_ptr, __y._M_tree_ptr);
+ }
+
+ rope(const _CharT* __s, const allocator_type& __a = allocator_type())
+ : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),
+ __a),__a)
+ { }
+
+ rope(const _CharT* __s, size_t __len,
+ const allocator_type& __a = allocator_type())
+ : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a), __a)
+ { }
+
+ // Should perhaps be templatized with respect to the iterator type
+ // and use Sequence_buffer. (It should perhaps use sequence_buffer
+ // even now.)
+ rope(const _CharT *__s, const _CharT *__e,
+ const allocator_type& __a = allocator_type())
+ : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a), __a)
+ { }
+
+ rope(const const_iterator& __s, const const_iterator& __e,
+ const allocator_type& __a = allocator_type())
+ : _Base(_S_substring(__s._M_root, __s._M_current_pos,
+ __e._M_current_pos), __a)
+ { }
+
+ rope(const iterator& __s, const iterator& __e,
+ const allocator_type& __a = allocator_type())
+ : _Base(_S_substring(__s._M_root, __s._M_current_pos,
+ __e._M_current_pos), __a)
+ { }
+
+ rope(_CharT __c, const allocator_type& __a = allocator_type())
+ : _Base(__a)
+ {
+ _CharT* __buf = _Data_allocate(_S_rounded_up_size(1));
+
+ construct(__buf, __c);
+ __STL_TRY {
+ _M_tree_ptr = _S_new_RopeLeaf(__buf, 1, __a);
}
- __STL_UNWIND(RopeBase::free_string(buf, 1))
- }
-
- rope(size_t n, charT c);
-
- // Should really be templatized with respect to the iterator type
- // and use sequence_buffer. (It should perhaps use sequence_buffer
- // even now.)
- rope(const charT *i, const charT *j)
- {
- if (i == j) {
- tree_ptr = 0;
- } else {
- size_t len = j - i;
- tree_ptr = RopeLeaf_from_unowned_char_ptr(i, len);
- }
- }
-
- rope()
- {
- tree_ptr = 0;
- }
-
- // Construct a rope from a function that can compute its members
- rope(char_producer<charT> *fn, size_t len, bool delete_fn)
- {
- tree_ptr = RopeFunction_from_fn(fn, len, delete_fn);
- }
-
- rope(const rope &x)
- {
- tree_ptr = x.tree_ptr;
- ref(tree_ptr);
- }
-
- ~rope()
- {
- unref(tree_ptr);
- }
-
- rope& operator=(const rope& x)
- {
- RopeBase *old = tree_ptr;
- tree_ptr = x.tree_ptr;
- ref(tree_ptr);
- unref(old);
- return(*this);
- }
-
- void push_back(charT x)
- {
- RopeBase *old = tree_ptr;
- tree_ptr = concat_char_iter(tree_ptr, &x, 1);
- unref(old);
- }
-
- void pop_back()
- {
- RopeBase *old = tree_ptr;
- tree_ptr = substring(tree_ptr, 0, tree_ptr -> size - 1);
- unref(old);
- }
-
- charT back() const
- {
- return fetch(tree_ptr, tree_ptr -> size - 1);
- }
-
- void push_front(charT x)
- {
- RopeBase *old = tree_ptr;
- RopeBase *left;
-
- left = RopeLeaf_from_unowned_char_ptr(&x, 1);
- __STL_TRY {
- tree_ptr = concat(left, tree_ptr);
- unref(old);
- unref(left);
+ __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, 1, __a))
+ }
+
+ rope(size_t __n, _CharT __c,
+ const allocator_type& __a = allocator_type());
+
+ rope(const allocator_type& __a = allocator_type())
+ : _Base(0, __a) {}
+
+ // Construct a rope from a function that can compute its members
+ rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
+ const allocator_type& __a = allocator_type())
+ : _Base(__a)
+ {
+ _M_tree_ptr = (0 == __len) ?
+ 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
+ }
+
+ rope(const rope& __x, const allocator_type& __a = allocator_type())
+ : _Base(__x._M_tree_ptr, __a)
+ {
+ _S_ref(_M_tree_ptr);
+ }
+
+ ~rope()
+ {
+ _S_unref(_M_tree_ptr);
+ }
+
+ rope& operator=(const rope& __x)
+ {
+ _RopeRep* __old = _M_tree_ptr;
+# ifdef __STL_USE_STD_ALLOCATORS
+ __stl_assert(get_allocator() == __x.get_allocator());
+# endif
+ _M_tree_ptr = __x._M_tree_ptr;
+ _S_ref(_M_tree_ptr);
+ _S_unref(__old);
+ return(*this);
+ }
+
+ void push_back(_CharT __x)
+ {
+ _RopeRep* __old = _M_tree_ptr;
+ _M_tree_ptr = _S_concat_char_iter(_M_tree_ptr, &__x, 1);
+ _S_unref(__old);
+ }
+
+ void pop_back()
+ {
+ _RopeRep* __old = _M_tree_ptr;
+ _M_tree_ptr =
+ _S_substring(_M_tree_ptr, 0, _M_tree_ptr->_M_size - 1);
+ _S_unref(__old);
+ }
+
+ _CharT back() const
+ {
+ return _S_fetch(_M_tree_ptr, _M_tree_ptr->_M_size - 1);
+ }
+
+ void push_front(_CharT __x)
+ {
+ _RopeRep* __old = _M_tree_ptr;
+ _RopeRep* __left =
+ __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
+ __STL_TRY {
+ _M_tree_ptr = _S_concat(__left, _M_tree_ptr);
+ _S_unref(__old);
+ _S_unref(__left);
}
- __STL_UNWIND(unref(left))
- }
-
- void pop_front()
- {
- RopeBase *old = tree_ptr;
- tree_ptr = substring(tree_ptr, 1, tree_ptr -> size);
- unref(old);
- }
-
- charT front() const
- {
- return fetch(tree_ptr, 0);
- }
-
- void balance()
- {
- RopeBase *old = tree_ptr;
- tree_ptr = balance(tree_ptr);
- unref(old);
- }
-
- void copy(charT * buffer) const {
- destroy(buffer, buffer + size());
- flatten(tree_ptr, buffer);
- }
-
- // This is the copy function from the standard, but
- // with the arguments reordered to make it consistent with the
- // rest of the interface.
- // Note that this guaranteed not to compile if the draft standard
- // order is assumed.
- size_type copy(size_type pos, size_type n, charT *buffer) const {
- size_t sz = size();
- size_t len = (pos + n > sz? sz - pos : n);
-
- destroy(buffer, buffer + len);
- flatten(tree_ptr, pos, len, buffer);
- return len;
- }
-
- // Print to stdout, exposing structure. May be useful for
- // performance debugging.
- void dump() {
- dump(tree_ptr);
- }
-
- // Convert to 0 terminated string in new allocated memory.
- // Embedded 0s in the input do not terminate the copy.
- const charT * c_str() const;
-
- // As above, but lso use the flattened representation as the
- // the new rope representation.
- const charT * replace_with_c_str();
-
- // Reclaim memory for the c_str generated flattened string.
- // Intentionally undocumented, since it's hard to say when this
- // is safe for multiple threads.
- void delete_c_str () {
- if (0 == tree_ptr) return;
- if (RopeBase::leaf == tree_ptr -> tag
- && ((RopeLeaf *)tree_ptr) -> data == tree_ptr -> c_string) {
- // Representation shared
- return;
- }
-# ifndef __GC
- tree_ptr -> free_c_string();
-# endif
- tree_ptr -> c_string = 0;
- }
-
- charT operator[] (size_type pos) const {
- return fetch(tree_ptr, pos);
- }
-
- charT at(size_type pos) const {
- // if (pos >= size()) throw out_of_range;
- return (*this)[pos];
- }
-
- const_iterator begin() const {
- return(const_iterator(tree_ptr, 0));
- }
-
- // An easy way to get a const iterator from a non-const container.
- const_iterator const_begin() const {
- return(const_iterator(tree_ptr, 0));
- }
-
- const_iterator end() const {
- return(const_iterator(tree_ptr, size()));
- }
-
- const_iterator const_end() const {
- return(const_iterator(tree_ptr, size()));
- }
-
- size_type size() const {
- return(0 == tree_ptr? 0 : tree_ptr -> size);
- }
-
- size_type length() const {
- return size();
- }
-
- size_type max_size() const {
- return min_len[RopeBase::max_rope_depth-1] - 1;
- // Guarantees that the result can be sufficirntly
- // balanced. Longer ropes will probably still work,
- // but it's harder to make guarantees.
- }
+ __STL_UNWIND(_S_unref(__left))
+ }
+
+ void pop_front()
+ {
+ _RopeRep* __old = _M_tree_ptr;
+ _M_tree_ptr = _S_substring(_M_tree_ptr, 1, _M_tree_ptr->_M_size);
+ _S_unref(__old);
+ }
+
+ _CharT front() const
+ {
+ return _S_fetch(_M_tree_ptr, 0);
+ }
+
+ void balance()
+ {
+ _RopeRep* __old = _M_tree_ptr;
+ _M_tree_ptr = _S_balance(_M_tree_ptr);
+ _S_unref(__old);
+ }
+
+ void copy(_CharT* __buffer) const {
+ destroy(__buffer, __buffer + size());
+ _S_flatten(_M_tree_ptr, __buffer);
+ }
+
+ // This is the copy function from the standard, but
+ // with the arguments reordered to make it consistent with the
+ // rest of the interface.
+ // Note that this guaranteed not to compile if the draft standard
+ // order is assumed.
+ size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
+ {
+ size_t __size = size();
+ size_t __len = (__pos + __n > __size? __size - __pos : __n);
+
+ destroy(__buffer, __buffer + __len);
+ _S_flatten(_M_tree_ptr, __pos, __len, __buffer);
+ return __len;
+ }
+
+ // Print to stdout, exposing structure. May be useful for
+ // performance debugging.
+ void dump() {
+ _S_dump(_M_tree_ptr);
+ }
+
+ // Convert to 0 terminated string in new allocated memory.
+ // Embedded 0s in the input do not terminate the copy.
+ const _CharT* c_str() const;
+
+ // As above, but lso use the flattened representation as the
+ // the new rope representation.
+ const _CharT* replace_with_c_str();
+
+ // Reclaim memory for the c_str generated flattened string.
+ // Intentionally undocumented, since it's hard to say when this
+ // is safe for multiple threads.
+ void delete_c_str () {
+ if (0 == _M_tree_ptr) return;
+ if (_RopeRep::_S_leaf == _M_tree_ptr->_M_tag &&
+ ((_RopeLeaf*)_M_tree_ptr)->_M_data ==
+ _M_tree_ptr->_M_c_string) {
+ // Representation shared
+ return;
+ }
+# ifndef __GC
+ _M_tree_ptr->_M_free_c_string();
+# endif
+ _M_tree_ptr->_M_c_string = 0;
+ }
+
+ _CharT operator[] (size_type __pos) const {
+ return _S_fetch(_M_tree_ptr, __pos);
+ }
+
+ _CharT at(size_type __pos) const {
+ // if (__pos >= size()) throw out_of_range; // XXX
+ return (*this)[__pos];
+ }
+
+ const_iterator begin() const {
+ return(const_iterator(_M_tree_ptr, 0));
+ }
+
+ // An easy way to get a const iterator from a non-const container.
+ const_iterator const_begin() const {
+ return(const_iterator(_M_tree_ptr, 0));
+ }
+
+ const_iterator end() const {
+ return(const_iterator(_M_tree_ptr, size()));
+ }
+
+ const_iterator const_end() const {
+ return(const_iterator(_M_tree_ptr, size()));
+ }
+
+ size_type size() const {
+ return(0 == _M_tree_ptr? 0 : _M_tree_ptr->_M_size);
+ }
+
+ size_type length() const {
+ return size();
+ }
+
+ size_type max_size() const {
+ return _S_min_len[_RopeRep::_S_max_rope_depth-1] - 1;
+ // Guarantees that the result can be sufficirntly
+ // balanced. Longer ropes will probably still work,
+ // but it's harder to make guarantees.
+ }
# ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
typedef reverse_iterator<const_iterator> const_reverse_iterator;
# else /* __STL_CLASS_PARTIAL_SPECIALIZATION */
- typedef reverse_iterator<const_iterator, value_type, const_reference,
- difference_type> const_reverse_iterator;
+ typedef reverse_iterator<const_iterator, value_type, const_reference,
+ difference_type> const_reverse_iterator;
# endif /* __STL_CLASS_PARTIAL_SPECIALIZATION */
- const_reverse_iterator rbegin() const {
- return const_reverse_iterator(end());
- }
-
- const_reverse_iterator const_rbegin() const {
- return const_reverse_iterator(end());
- }
-
- const_reverse_iterator rend() const {
- return const_reverse_iterator(begin());
- }
-
- const_reverse_iterator const_rend() const {
- return const_reverse_iterator(begin());
- }
-
- friend rope<charT,Alloc>
- operator+ __STL_NULL_TMPL_ARGS (const rope<charT,Alloc> &left,
- const rope<charT,Alloc> &right);
-
- friend rope<charT,Alloc>
- operator+ __STL_NULL_TMPL_ARGS (const rope<charT,Alloc> &left,
- const charT* right);
-
- friend rope<charT,Alloc>
- operator+ __STL_NULL_TMPL_ARGS (const rope<charT,Alloc> &left,
- charT right);
-
- // The symmetric cases are intentionally omitted, since they're presumed
- // to be less common, and we don't handle them as well.
-
- // The following should really be templatized.
- // The first argument should be an input iterator or
- // forward iterator with value_type charT.
- rope& append(const charT* iter, size_t n) {
- RopeBase* result = destr_concat_char_iter(tree_ptr, iter, n);
- unref(tree_ptr);
- tree_ptr = result;
- return *this;
- }
-
- rope& append(const charT* c_string) {
- size_t len = char_ptr_len(c_string);
- append(c_string, len);
- return(*this);
- }
-
- rope& append(const charT* s, const charT* e) {
- RopeBase* result =
- destr_concat_char_iter(tree_ptr, s, e - s);
- unref(tree_ptr);
- tree_ptr = result;
- return *this;
- }
-
- rope& append(const_iterator s, const_iterator e) {
- __stl_assert(s.root == e.root);
- self_destruct_ptr appendee(substring(s.root, s.current_pos,
- e.current_pos));
- RopeBase* result = concat(tree_ptr, (RopeBase *)appendee);
- unref(tree_ptr);
- tree_ptr = result;
- return *this;
- }
-
- rope& append(charT c) {
- RopeBase* result = destr_concat_char_iter(tree_ptr, &c, 1);
- unref(tree_ptr);
- tree_ptr = result;
- return *this;
- }
-
- rope& append() { return append(charT()); }
-
- rope& append(const rope& y) {
- RopeBase* result = concat(tree_ptr, y.tree_ptr);
- unref(tree_ptr);
- tree_ptr = result;
- return *this;
- }
-
- rope& append(size_t n, charT c) {
- rope<charT,Alloc> last(n, c);
- return append(last);
- }
-
- void swap(rope& b) {
- RopeBase * tmp = tree_ptr;
- tree_ptr = b.tree_ptr;
- b.tree_ptr = tmp;
- }
+ const_reverse_iterator rbegin() const {
+ return const_reverse_iterator(end());
+ }
+
+ const_reverse_iterator const_rbegin() const {
+ return const_reverse_iterator(end());
+ }
+
+ const_reverse_iterator rend() const {
+ return const_reverse_iterator(begin());
+ }
+
+ const_reverse_iterator const_rend() const {
+ return const_reverse_iterator(begin());
+ }
+
+ friend rope<_CharT,_Alloc>
+ operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
+ const rope<_CharT,_Alloc>& __right);
+
+ friend rope<_CharT,_Alloc>
+ operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
+ const _CharT* __right);
+
+ friend rope<_CharT,_Alloc>
+ operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
+ _CharT __right);
+
+ // The symmetric cases are intentionally omitted, since they're presumed
+ // to be less common, and we don't handle them as well.
+
+ // The following should really be templatized.
+ // The first argument should be an input iterator or
+ // forward iterator with value_type _CharT.
+ rope& append(const _CharT* __iter, size_t __n) {
+ _RopeRep* __result =
+ _S_destr_concat_char_iter(_M_tree_ptr, __iter, __n);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ return *this;
+ }
+
+ rope& append(const _CharT* __c_string) {
+ size_t __len = _S_char_ptr_len(__c_string);
+ append(__c_string, __len);
+ return(*this);
+ }
+
+ rope& append(const _CharT* __s, const _CharT* __e) {
+ _RopeRep* __result =
+ _S_destr_concat_char_iter(_M_tree_ptr, __s, __e - __s);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ return *this;
+ }
+
+ rope& append(const_iterator __s, const_iterator __e) {
+ __stl_assert(__s._M_root == __e._M_root);
+# ifdef __STL_USE_STD_ALLOCATORS
+ __stl_assert(get_allocator() == __s._M_root->get_allocator());
+# endif
+ _Self_destruct_ptr __appendee(_S_substring(
+ __s._M_root, __s._M_current_pos, __e._M_current_pos));
+ _RopeRep* __result =
+ _S_concat(_M_tree_ptr, (_RopeRep*)__appendee);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ return *this;
+ }
+
+ rope& append(_CharT __c) {
+ _RopeRep* __result =
+ _S_destr_concat_char_iter(_M_tree_ptr, &__c, 1);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ return *this;
+ }
+
+ rope& append() { return append(_CharT()); } // XXX why?
+
+ rope& append(const rope& __y) {
+# ifdef __STL_USE_STD_ALLOCATORS
+ __stl_assert(__y.get_allocator() == get_allocator());
+# endif
+ _RopeRep* __result = _S_concat(_M_tree_ptr, __y._M_tree_ptr);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ return *this;
+ }
+
+ rope& append(size_t __n, _CharT __c) {
+ rope<_CharT,_Alloc> __last(__n, __c);
+ return append(__last);
+ }
+
+ void swap(rope& __b) {
+# ifdef __STL_USE_STD_ALLOCATORS
+ __stl_assert(get_allocator() == __b.get_allocator());
+# endif
+ _RopeRep* __tmp = _M_tree_ptr;
+ _M_tree_ptr = __b._M_tree_ptr;
+ __b._M_tree_ptr = __tmp;
+ }
protected:
- // Result is included in refcount.
- static RopeBase * replace(RopeBase *old, size_t pos1,
- size_t pos2, RopeBase *r) {
- if (0 == old) { ref(r); return r; }
- self_destruct_ptr left(substring(old, 0, pos1));
- self_destruct_ptr right(substring(old, pos2, old -> size));
- RopeBase * result;
-
- if (0 == r) {
- result = concat(left, right);
- } else {
- self_destruct_ptr left_result(concat(left, r));
- result = concat(left_result, right);
- }
- return result;
- }
+ // Result is included in refcount.
+ static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
+ size_t __pos2, _RopeRep* __r) {
+ if (0 == __old) { _S_ref(__r); return __r; }
+ _Self_destruct_ptr __left(
+ _S_substring(__old, 0, __pos1));
+ _Self_destruct_ptr __right(
+ _S_substring(__old, __pos2, __old->_M_size));
+ _RopeRep* __result;
+
+# ifdef __STL_USE_STD_ALLOCATORS
+ __stl_assert(__old->get_allocator() == __r->get_allocator());
+# endif
+ if (0 == __r) {
+ __result = _S_concat(__left, __right);
+ } else {
+ _Self_destruct_ptr __left_result(_S_concat(__left, __r));
+ __result = _S_concat(__left_result, __right);
+ }
+ return __result;
+ }
public:
- void insert(size_t p, const rope& r) {
- RopeBase * result = replace(tree_ptr, p, p,
- r.tree_ptr);
- unref(tree_ptr);
- tree_ptr = result;
- }
-
- void insert(size_t p, size_t n, charT c) {
- rope<charT,Alloc> r(n,c);
- insert(p, r);
- }
-
- void insert(size_t p, const charT * i, size_t n) {
- self_destruct_ptr left(substring(tree_ptr, 0, p));
- self_destruct_ptr right(substring(tree_ptr, p, size()));
- self_destruct_ptr left_result(concat_char_iter(left, i, n));
- RopeBase * result =
- concat(left_result, right);
- unref(tree_ptr);
- tree_ptr = result;
- }
-
- void insert(size_t p, const charT * c_string) {
- insert(p, c_string, char_ptr_len(c_string));
- }
-
- void insert(size_t p, charT c) {
- insert(p, &c, 1);
- }
-
- void insert(size_t p) {
- charT c = charT();
- insert(p, &c, 1);
- }
-
- void insert(size_t p, const charT *i, const charT *j) {
- rope r(i, j);
- insert(p, r);
- }
-
- void insert(size_t p, const const_iterator& i,
- const const_iterator& j) {
- rope r(i, j);
- insert(p, r);
- }
-
- void insert(size_t p, const iterator& i,
- const iterator& j) {
- rope r(i, j);
- insert(p, r);
- }
-
- // (position, length) versions of replace operations:
-
- void replace(size_t p, size_t n, const rope& r) {
- RopeBase * result = replace(tree_ptr, p, p + n,
- r.tree_ptr);
- unref(tree_ptr);
- tree_ptr = result;
- }
-
- void replace(size_t p, size_t n, const charT *i, size_t i_len) {
- rope r(i, i_len);
- replace(p, n, r);
- }
-
- void replace(size_t p, size_t n, charT c) {
- rope r(c);
- replace(p, n, r);
- }
-
- void replace(size_t p, size_t n, const charT *c_string) {
- rope r(c_string);
- replace(p, n, r);
- }
-
- void replace(size_t p, size_t n, const charT *i, const charT *j) {
- rope r(i, j);
- replace(p, n, r);
- }
-
- void replace(size_t p, size_t n,
- const const_iterator& i, const const_iterator& j) {
- rope r(i, j);
- replace(p, n, r);
- }
-
- void replace(size_t p, size_t n,
- const iterator& i, const iterator& j) {
- rope r(i, j);
- replace(p, n, r);
- }
-
- // Single character variants:
- void replace(size_t p, charT c) {
- iterator i(this, p);
- *i = c;
- }
-
- void replace(size_t p, const rope& r) {
- replace(p, 1, r);
- }
-
- void replace(size_t p, const charT *i, size_t i_len) {
- replace(p, 1, i, i_len);
- }
-
- void replace(size_t p, const charT *c_string) {
- replace(p, 1, c_string);
- }
-
- void replace(size_t p, const charT *i, const charT *j) {
- replace(p, 1, i, j);
- }
-
- void replace(size_t p, const const_iterator& i,
- const const_iterator& j) {
- replace(p, 1, i, j);
- }
-
- void replace(size_t p, const iterator& i,
- const iterator& j) {
- replace(p, 1, i, j);
- }
-
- // Erase, (position, size) variant.
- void erase(size_t p, size_t n) {
- RopeBase * result = replace(tree_ptr, p, p + n, 0);
- unref(tree_ptr);
- tree_ptr = result;
- }
-
- // Erase, single character
- void erase(size_t p) {
- erase(p, p + 1);
- }
-
- // Insert, iterator variants.
- iterator insert(const iterator& p, const rope& r)
- { insert(p.index(), r); return p; }
- iterator insert(const iterator& p, size_t n, charT c)
- { insert(p.index(), n, c); return p; }
- iterator insert(const iterator& p, charT c)
- { insert(p.index(), c); return p; }
- iterator insert(const iterator& p )
- { insert(p.index()); return p; }
- iterator insert(const iterator& p, const charT *c_string)
- { insert(p.index(), c_string); return p; }
- iterator insert(const iterator& p, const charT *i, size_t n)
- { insert(p.index(), i, n); return p; }
- iterator insert(const iterator& p, const charT *i, const charT *j)
- { insert(p.index(), i, j); return p; }
- iterator insert(const iterator& p,
- const const_iterator& i, const const_iterator& j)
- { insert(p.index(), i, j); return p; }
- iterator insert(const iterator& p,
- const iterator& i, const iterator& j)
- { insert(p.index(), i, j); return p; }
-
- // Replace, range variants.
- void replace(const iterator& p, const iterator& q,
- const rope& r)
- { replace(p.index(), q.index() - p.index(), r); }
- void replace(const iterator& p, const iterator& q, charT c)
- { replace(p.index(), q.index() - p.index(), c); }
- void replace(const iterator& p, const iterator& q,
- const charT * c_string)
- { replace(p.index(), q.index() - p.index(), c_string); }
- void replace(const iterator& p, const iterator& q,
- const charT *i, size_t n)
- { replace(p.index(), q.index() - p.index(), i, n); }
- void replace(const iterator& p, const iterator& q,
- const charT *i, const charT *j)
- { replace(p.index(), q.index() - p.index(), i, j); }
- void replace(const iterator& p, const iterator& q,
- const const_iterator& i, const const_iterator& j)
- { replace(p.index(), q.index() - p.index(), i, j); }
- void replace(const iterator& p, const iterator& q,
- const iterator& i, const iterator& j)
- { replace(p.index(), q.index() - p.index(), i, j); }
-
- // Replace, iterator variants.
- void replace(const iterator& p, const rope& r)
- { replace(p.index(), r); }
- void replace(const iterator& p, charT c)
- { replace(p.index(), c); }
- void replace(const iterator& p, const charT * c_string)
- { replace(p.index(), c_string); }
- void replace(const iterator& p, const charT *i, size_t n)
- { replace(p.index(), i, n); }
- void replace(const iterator& p, const charT *i, const charT *j)
- { replace(p.index(), i, j); }
- void replace(const iterator& p, const_iterator i, const_iterator j)
- { replace(p.index(), i, j); }
- void replace(const iterator& p, iterator i, iterator j)
- { replace(p.index(), i, j); }
-
- // Iterator and range variants of erase
- iterator erase(const iterator &p, const iterator &q) {
- size_t p_index = p.index();
- erase(p_index, q.index() - p_index);
- return iterator(this, p_index);
- }
- iterator erase(const iterator &p) {
- size_t p_index = p.index();
- erase(p_index, 1);
- return iterator(this, p_index);
- }
-
- rope substr(size_t start, size_t len = 1) const {
- return rope<charT,Alloc>(
- substring(tree_ptr, start, start + len));
- }
-
- rope substr(iterator start, iterator end) const {
- return rope<charT,Alloc>(
- substring(tree_ptr, start.index(), end.index()));
- }
-
- rope substr(iterator start) const {
- size_t pos = start.index();
- return rope<charT,Alloc>(
- substring(tree_ptr, pos, pos + 1));
- }
-
- rope substr(const_iterator start, const_iterator end) const {
- // This might eventually take advantage of the cache in the
- // iterator.
- return rope<charT,Alloc>
- (substring(tree_ptr, start.index(), end.index()));
- }
-
- rope<charT,Alloc> substr(const_iterator start) {
- size_t pos = start.index();
- return rope<charT,Alloc>(substring(tree_ptr, pos, pos + 1));
- }
-
- size_type find(charT c, size_type pos = 0) const;
- size_type find(charT *s, size_type pos = 0) const {
- const_iterator result = search(const_begin() + pos, const_end(),
- s, s + char_ptr_len(s));
- return result.index();
- }
-
- iterator mutable_begin() {
- return(iterator(this, 0));
- }
-
- iterator mutable_end() {
- return(iterator(this, size()));
- }
+ void insert(size_t __p, const rope& __r) {
+ _RopeRep* __result =
+ replace(_M_tree_ptr, __p, __p, __r._M_tree_ptr);
+# ifdef __STL_USE_STD_ALLOCATORS
+ __stl_assert(get_allocator() == __r.get_allocator());
+# endif
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ }
+
+ void insert(size_t __p, size_t __n, _CharT __c) {
+ rope<_CharT,_Alloc> __r(__n,__c);
+ insert(__p, __r);
+ }
+
+ void insert(size_t __p, const _CharT* __i, size_t __n) {
+ _Self_destruct_ptr __left(_S_substring(_M_tree_ptr, 0, __p));
+ _Self_destruct_ptr __right(_S_substring(_M_tree_ptr, __p, size()));
+ _Self_destruct_ptr __left_result(
+ _S_concat_char_iter(__left, __i, __n));
+ _RopeRep* __result = _S_concat(__left_result, __right);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ }
+
+ void insert(size_t __p, const _CharT* __c_string) {
+ insert(__p, __c_string, _S_char_ptr_len(__c_string));
+ }
+
+ void insert(size_t __p, _CharT __c) {
+ insert(__p, &__c, 1);
+ }
+
+ void insert(size_t __p) {
+ _CharT __c = _CharT();
+ insert(__p, &__c, 1);
+ }
+
+ void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
+ rope __r(__i, __j);
+ insert(__p, __r);
+ }
+
+ void insert(size_t __p, const const_iterator& __i,
+ const const_iterator& __j) {
+ rope __r(__i, __j);
+ insert(__p, __r);
+ }
+
+ void insert(size_t __p, const iterator& __i,
+ const iterator& __j) {
+ rope __r(__i, __j);
+ insert(__p, __r);
+ }
+
+ // (position, length) versions of replace operations:
+
+ void replace(size_t __p, size_t __n, const rope& __r) {
+ _RopeRep* __result =
+ replace(_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ }
+
+ void replace(size_t __p, size_t __n,
+ const _CharT* __i, size_t __i_len) {
+ rope __r(__i, __i_len);
+ replace(__p, __n, __r);
+ }
+
+ void replace(size_t __p, size_t __n, _CharT __c) {
+ rope __r(__c);
+ replace(__p, __n, __r);
+ }
+
+ void replace(size_t __p, size_t __n, const _CharT* __c_string) {
+ rope __r(__c_string);
+ replace(__p, __n, __r);
+ }
+
+ void replace(size_t __p, size_t __n,
+ const _CharT* __i, const _CharT* __j) {
+ rope __r(__i, __j);
+ replace(__p, __n, __r);
+ }
+
+ void replace(size_t __p, size_t __n,
+ const const_iterator& __i, const const_iterator& __j) {
+ rope __r(__i, __j);
+ replace(__p, __n, __r);
+ }
+
+ void replace(size_t __p, size_t __n,
+ const iterator& __i, const iterator& __j) {
+ rope __r(__i, __j);
+ replace(__p, __n, __r);
+ }
+
+ // Single character variants:
+ void replace(size_t __p, _CharT __c) {
+ iterator __i(this, __p);
+ *__i = __c;
+ }
+
+ void replace(size_t __p, const rope& __r) {
+ replace(__p, 1, __r);
+ }
+
+ void replace(size_t __p, const _CharT* __i, size_t __i_len) {
+ replace(__p, 1, __i, __i_len);
+ }
+
+ void replace(size_t __p, const _CharT* __c_string) {
+ replace(__p, 1, __c_string);
+ }
+
+ void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
+ replace(__p, 1, __i, __j);
+ }
+
+ void replace(size_t __p, const const_iterator& __i,
+ const const_iterator& __j) {
+ replace(__p, 1, __i, __j);
+ }
+
+ void replace(size_t __p, const iterator& __i,
+ const iterator& __j) {
+ replace(__p, 1, __i, __j);
+ }
+
+ // Erase, (position, size) variant.
+ void erase(size_t __p, size_t __n) {
+ _RopeRep* __result = replace(_M_tree_ptr, __p, __p + __n, 0);
+ _S_unref(_M_tree_ptr);
+ _M_tree_ptr = __result;
+ }
+
+ // Erase, single character
+ void erase(size_t __p) {
+ erase(__p, __p + 1);
+ }
+
+ // Insert, iterator variants.
+ iterator insert(const iterator& __p, const rope& __r)
+ { insert(__p.index(), __r); return __p; }
+ iterator insert(const iterator& __p, size_t __n, _CharT __c)
+ { insert(__p.index(), __n, __c); return __p; }
+ iterator insert(const iterator& __p, _CharT __c)
+ { insert(__p.index(), __c); return __p; }
+ iterator insert(const iterator& __p )
+ { insert(__p.index()); return __p; }
+ iterator insert(const iterator& __p, const _CharT* c_string)
+ { insert(__p.index(), c_string); return __p; }
+ iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
+ { insert(__p.index(), __i, __n); return __p; }
+ iterator insert(const iterator& __p, const _CharT* __i,
+ const _CharT* __j)
+ { insert(__p.index(), __i, __j); return __p; }
+ iterator insert(const iterator& __p,
+ const const_iterator& __i, const const_iterator& __j)
+ { insert(__p.index(), __i, __j); return __p; }
+ iterator insert(const iterator& __p,
+ const iterator& __i, const iterator& __j)
+ { insert(__p.index(), __i, __j); return __p; }
+
+ // Replace, range variants.
+ void replace(const iterator& __p, const iterator& __q,
+ const rope& __r)
+ { replace(__p.index(), __q.index() - __p.index(), __r); }
+ void replace(const iterator& __p, const iterator& __q, _CharT __c)
+ { replace(__p.index(), __q.index() - __p.index(), __c); }
+ void replace(const iterator& __p, const iterator& __q,
+ const _CharT* __c_string)
+ { replace(__p.index(), __q.index() - __p.index(), __c_string); }
+ void replace(const iterator& __p, const iterator& __q,
+ const _CharT* __i, size_t __n)
+ { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
+ void replace(const iterator& __p, const iterator& __q,
+ const _CharT* __i, const _CharT* __j)
+ { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
+ void replace(const iterator& __p, const iterator& __q,
+ const const_iterator& __i, const const_iterator& __j)
+ { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
+ void replace(const iterator& __p, const iterator& __q,
+ const iterator& __i, const iterator& __j)
+ { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
+
+ // Replace, iterator variants.
+ void replace(const iterator& __p, const rope& __r)
+ { replace(__p.index(), __r); }
+ void replace(const iterator& __p, _CharT __c)
+ { replace(__p.index(), __c); }
+ void replace(const iterator& __p, const _CharT* __c_string)
+ { replace(__p.index(), __c_string); }
+ void replace(const iterator& __p, const _CharT* __i, size_t __n)
+ { replace(__p.index(), __i, __n); }
+ void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
+ { replace(__p.index(), __i, __j); }
+ void replace(const iterator& __p, const_iterator __i,
+ const_iterator __j)
+ { replace(__p.index(), __i, __j); }
+ void replace(const iterator& __p, iterator __i, iterator __j)
+ { replace(__p.index(), __i, __j); }
+
+ // Iterator and range variants of erase
+ iterator erase(const iterator& __p, const iterator& __q) {
+ size_t __p_index = __p.index();
+ erase(__p_index, __q.index() - __p_index);
+ return iterator(this, __p_index);
+ }
+ iterator erase(const iterator& __p) {
+ size_t __p_index = __p.index();
+ erase(__p_index, 1);
+ return iterator(this, __p_index);
+ }
+
+ rope substr(size_t __start, size_t __len = 1) const {
+ return rope<_CharT,_Alloc>(
+ _S_substring(_M_tree_ptr, __start, __start + __len));
+ }
+
+ rope substr(iterator __start, iterator __end) const {
+ return rope<_CharT,_Alloc>(
+ _S_substring(_M_tree_ptr, __start.index(), __end.index()));
+ }
+
+ rope substr(iterator __start) const {
+ size_t __pos = __start.index();
+ return rope<_CharT,_Alloc>(
+ _S_substring(_M_tree_ptr, __pos, __pos + 1));
+ }
+
+ rope substr(const_iterator __start, const_iterator __end) const {
+ // This might eventually take advantage of the cache in the
+ // iterator.
+ return rope<_CharT,_Alloc>(
+ _S_substring(_M_tree_ptr, __start.index(), __end.index()));
+ }
+
+ rope<_CharT,_Alloc> substr(const_iterator __start) {
+ size_t __pos = __start.index();
+ return rope<_CharT,_Alloc>(
+ _S_substring(_M_tree_ptr, __pos, __pos + 1));
+ }
+
+ static const size_type npos;
+
+ size_type find(_CharT __c, size_type __pos = 0) const;
+ size_type find(_CharT* __s, size_type __pos = 0) const {
+ size_type __result_pos;
+ const_iterator __result = search(const_begin() + __pos, const_end(),
+ __s, __s + _S_char_ptr_len(__s));
+ __result_pos = __result.index();
+# ifndef __STL_OLD_ROPE_SEMANTICS
+ if (__result_pos == size()) __result_pos = npos;
+# endif
+ return __result_pos;
+ }
+
+ iterator mutable_begin() {
+ return(iterator(this, 0));
+ }
+
+ iterator mutable_end() {
+ return(iterator(this, size()));
+ }
# ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
typedef reverse_iterator<iterator> reverse_iterator;
# else /* __STL_CLASS_PARTIAL_SPECIALIZATION */
- typedef reverse_iterator<iterator, value_type, reference,
- difference_type> reverse_iterator;
+ typedef reverse_iterator<iterator, value_type, reference,
+ difference_type> reverse_iterator;
# endif /* __STL_CLASS_PARTIAL_SPECIALIZATION */
- reverse_iterator mutable_rbegin() {
- return reverse_iterator(mutable_end());
- }
+ reverse_iterator mutable_rbegin() {
+ return reverse_iterator(mutable_end());
+ }
- reverse_iterator mutable_rend() {
- return reverse_iterator(mutable_begin());
- }
+ reverse_iterator mutable_rend() {
+ return reverse_iterator(mutable_begin());
+ }
- reference mutable_reference_at(size_type pos) {
- return reference(this, pos);
- }
+ reference mutable_reference_at(size_type __pos) {
+ return reference(this, __pos);
+ }
-# ifdef __STD_STUFF
- reference operator[] (size_type pos) {
- return charT_ref_proxy(this, pos);
- }
+# ifdef __STD_STUFF
+ reference operator[] (size_type __pos) {
+ return _char_ref_proxy(this, __pos);
+ }
- reference at(size_type pos) {
- // if (pos >= size()) throw out_of_range;
- return (*this)[pos];
- }
+ reference at(size_type __pos) {
+ // if (__pos >= size()) throw out_of_range; // XXX
+ return (*this)[__pos];
+ }
- void resize(size_type n, charT c) {}
- void resize(size_type n) {}
- void reserve(size_type res_arg = 0) {}
- size_type capacity() const {
- return max_size();
- }
+ void resize(size_type __n, _CharT __c) {}
+ void resize(size_type __n) {}
+ void reserve(size_type __res_arg = 0) {}
+ size_type capacity() const {
+ return max_size();
+ }
- // Stuff below this line is dangerous because it's error prone.
- // I would really like to get rid of it.
- // copy function with funny arg ordering.
- size_type copy(charT *buffer, size_type n, size_type pos = 0)
- const {
- return copy(pos, n, buffer);
- }
+ // Stuff below this line is dangerous because it's error prone.
+ // I would really like to get rid of it.
+ // copy function with funny arg ordering.
+ size_type copy(_CharT* __buffer, size_type __n,
+ size_type __pos = 0) const {
+ return copy(__pos, __n, __buffer);
+ }
- iterator end() { return mutable_end(); }
+ iterator end() { return mutable_end(); }
- iterator begin() { return mutable_begin(); }
+ iterator begin() { return mutable_begin(); }
- reverse_iterator rend() { return mutable_rend(); }
+ reverse_iterator rend() { return mutable_rend(); }
- reverse_iterator rbegin() { return mutable_rbegin(); }
+ reverse_iterator rbegin() { return mutable_rbegin(); }
-# else
+# else
- const_iterator end() { return const_end(); }
+ const_iterator end() { return const_end(); }
- const_iterator begin() { return const_begin(); }
+ const_iterator begin() { return const_begin(); }
- const_reverse_iterator rend() { return const_rend(); }
+ const_reverse_iterator rend() { return const_rend(); }
- const_reverse_iterator rbegin() { return const_rbegin(); }
+ const_reverse_iterator rbegin() { return const_rbegin(); }
-# endif
-
+# endif
+
};
-template <class charT, class Alloc>
-inline bool operator== (const __rope_const_iterator<charT,Alloc> & x,
- const __rope_const_iterator<charT,Alloc> & y) {
- return (x.current_pos == y.current_pos && x.root == y.root);
+template <class _CharT, class _Alloc>
+const rope<_CharT, _Alloc>::size_type rope<_CharT, _Alloc>::npos =
+ (size_type)(-1);
+
+template <class _CharT, class _Alloc>
+inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ const _Rope_const_iterator<_CharT,_Alloc>& __y) {
+ return (__x._M_current_pos == __y._M_current_pos &&
+ __x._M_root == __y._M_root);
}
-template <class charT, class Alloc>
-inline bool operator< (const __rope_const_iterator<charT,Alloc> & x,
- const __rope_const_iterator<charT,Alloc> & y) {
- return (x.current_pos < y.current_pos);
+template <class _CharT, class _Alloc>
+inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ const _Rope_const_iterator<_CharT,_Alloc>& __y) {
+ return (__x._M_current_pos < __y._M_current_pos);
}
-template <class charT, class Alloc>
-inline ptrdiff_t operator-(const __rope_const_iterator<charT,Alloc> & x,
- const __rope_const_iterator<charT,Alloc> & y) {
- return x.current_pos - y.current_pos;
+template <class _CharT, class _Alloc>
+inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
+ const _Rope_const_iterator<_CharT,_Alloc>& __y) {
+ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
}
-template <class charT, class Alloc>
-inline __rope_const_iterator<charT,Alloc>
-operator-(const __rope_const_iterator<charT,Alloc> & x,
- ptrdiff_t n) {
- return __rope_const_iterator<charT,Alloc>(x.root, x.current_pos - n);
+template <class _CharT, class _Alloc>
+inline _Rope_const_iterator<_CharT,_Alloc>
+operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
+ return _Rope_const_iterator<_CharT,_Alloc>(
+ __x._M_root, __x._M_current_pos - __n);
}
-template <class charT, class Alloc>
-inline __rope_const_iterator<charT,Alloc>
-operator+(const __rope_const_iterator<charT,Alloc> & x,
- ptrdiff_t n) {
- return __rope_const_iterator<charT,Alloc>(x.root, x.current_pos + n);
+template <class _CharT, class _Alloc>
+inline _Rope_const_iterator<_CharT,_Alloc>
+operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
+ return _Rope_const_iterator<_CharT,_Alloc>(
+ __x._M_root, __x._M_current_pos + __n);
}
-template <class charT, class Alloc>
-inline __rope_const_iterator<charT,Alloc>
-operator+(ptrdiff_t n,
- const __rope_const_iterator<charT,Alloc> & x) {
- return __rope_const_iterator<charT,Alloc>(x.root, x.current_pos + n);
+template <class _CharT, class _Alloc>
+inline _Rope_const_iterator<_CharT,_Alloc>
+operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
+ return _Rope_const_iterator<_CharT,_Alloc>(
+ __x._M_root, __x._M_current_pos + __n);
}
-template <class charT, class Alloc>
-inline bool operator== (const __rope_iterator<charT,Alloc> & x,
- const __rope_iterator<charT,Alloc> & y) {
- return (x.current_pos == y.current_pos && x.root_rope == y.root_rope);
+template <class _CharT, class _Alloc>
+inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
+ const _Rope_iterator<_CharT,_Alloc>& __y) {
+ return (__x._M_current_pos == __y._M_current_pos &&
+ __x._M_root_rope == __y._M_root_rope);
}
-template <class charT, class Alloc>
-inline bool operator< (const __rope_iterator<charT,Alloc> & x,
- const __rope_iterator<charT,Alloc> & y) {
- return (x.current_pos < y.current_pos);
+template <class _CharT, class _Alloc>
+inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
+ const _Rope_iterator<_CharT,_Alloc>& __y) {
+ return (__x._M_current_pos < __y._M_current_pos);
}
-template <class charT, class Alloc>
-inline ptrdiff_t operator-(const __rope_iterator<charT,Alloc> & x,
- const __rope_iterator<charT,Alloc> & y) {
- return x.current_pos - y.current_pos;
+template <class _CharT, class _Alloc>
+inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
+ const _Rope_iterator<_CharT,_Alloc>& __y) {
+ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
}
-template <class charT, class Alloc>
-inline __rope_iterator<charT,Alloc>
-operator-(const __rope_iterator<charT,Alloc> & x,
- ptrdiff_t n) {
- return __rope_iterator<charT,Alloc>(x.root_rope, x.current_pos - n);
+template <class _CharT, class _Alloc>
+inline _Rope_iterator<_CharT,_Alloc>
+operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
+ ptrdiff_t __n) {
+ return _Rope_iterator<_CharT,_Alloc>(
+ __x._M_root_rope, __x._M_current_pos - __n);
}
-template <class charT, class Alloc>
-inline __rope_iterator<charT,Alloc>
-operator+(const __rope_iterator<charT,Alloc> & x,
- ptrdiff_t n) {
- return __rope_iterator<charT,Alloc>(x.root_rope, x.current_pos + n);
+template <class _CharT, class _Alloc>
+inline _Rope_iterator<_CharT,_Alloc>
+operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
+ ptrdiff_t __n) {
+ return _Rope_iterator<_CharT,_Alloc>(
+ __x._M_root_rope, __x._M_current_pos + __n);
}
-template <class charT, class Alloc>
-inline __rope_iterator<charT,Alloc>
-operator+(ptrdiff_t n,
- const __rope_iterator<charT,Alloc> & x) {
- return __rope_iterator<charT,Alloc>(x.root_rope, x.current_pos + n);
+template <class _CharT, class _Alloc>
+inline _Rope_iterator<_CharT,_Alloc>
+operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
+ return _Rope_iterator<_CharT,_Alloc>(
+ __x._M_root_rope, __x._M_current_pos + __n);
}
-template <class charT, class Alloc>
+template <class _CharT, class _Alloc>
inline
-rope<charT,Alloc>
-operator+ (const rope<charT,Alloc> &left,
- const rope<charT,Alloc> &right)
+rope<_CharT,_Alloc>
+operator+ (const rope<_CharT,_Alloc>& __left,
+ const rope<_CharT,_Alloc>& __right)
{
- return rope<charT,Alloc>
- (rope<charT,Alloc>::concat(left.tree_ptr, right.tree_ptr));
- // Inlining this should make it possible to keep left and
- // right in registers.
+# ifdef __STL_USE_STD_ALLOCATORS
+ __stl_assert(__left.get_allocator() == __right.get_allocator());
+# endif
+ return rope<_CharT,_Alloc>(
+ rope<_CharT,_Alloc>::_S_concat(__left._M_tree_ptr, __right._M_tree_ptr));
+ // Inlining this should make it possible to keep __left and
+ // __right in registers.
}
-template <class charT, class Alloc>
+template <class _CharT, class _Alloc>
inline
-rope<charT,Alloc>&
-operator+= (rope<charT,Alloc> &left,
- const rope<charT,Alloc> &right)
+rope<_CharT,_Alloc>&
+operator+= (rope<_CharT,_Alloc>& __left,
+ const rope<_CharT,_Alloc>& __right)
{
- left.append(right);
- return left;
+ __left.append(__right);
+ return __left;
}
-template <class charT, class Alloc>
+template <class _CharT, class _Alloc>
inline
-rope<charT,Alloc>
-operator+ (const rope<charT,Alloc> &left,
- const charT* right) {
- size_t rlen = rope<charT,Alloc>::char_ptr_len(right);
- return rope<charT,Alloc>
- (rope<charT,Alloc>::concat_char_iter(left.tree_ptr, right, rlen));
+rope<_CharT,_Alloc>
+operator+ (const rope<_CharT,_Alloc>& __left,
+ const _CharT* __right) {
+ size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
+ return rope<_CharT,_Alloc>(
+ rope<_CharT,_Alloc>::_S_concat_char_iter(
+ __left._M_tree_ptr, __right, __rlen));
}
-template <class charT, class Alloc>
+template <class _CharT, class _Alloc>
inline
-rope<charT,Alloc>&
-operator+= (rope<charT,Alloc> &left,
- const charT* right) {
- left.append(right);
- return left;
+rope<_CharT,_Alloc>&
+operator+= (rope<_CharT,_Alloc>& __left,
+ const _CharT* __right) {
+ __left.append(__right);
+ return __left;
}
-template <class charT, class Alloc>
+template <class _CharT, class _Alloc>
inline
-rope<charT,Alloc>
-operator+ (const rope<charT,Alloc> &left, charT right) {
- return rope<charT,Alloc>
- (rope<charT,Alloc>::concat_char_iter(left.tree_ptr, &right, 1));
+rope<_CharT,_Alloc>
+operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
+ return rope<_CharT,_Alloc>(
+ rope<_CharT,_Alloc>::_S_concat_char_iter(
+ __left._M_tree_ptr, &__right, 1));
}
-template <class charT, class Alloc>
+template <class _CharT, class _Alloc>
inline
-rope<charT,Alloc>&
-operator+= (rope<charT,Alloc> &left, charT right) {
- left.append(right);
- return left;
+rope<_CharT,_Alloc>&
+operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
+ __left.append(__right);
+ return __left;
}
-template <class charT, class Alloc>
+template <class _CharT, class _Alloc>
bool
-operator< (const rope<charT,Alloc> &left, const rope<charT,Alloc> &right) {
- return left.compare(right) < 0;
+operator< (const rope<_CharT,_Alloc>& __left,
+ const rope<_CharT,_Alloc>& __right) {
+ return __left.compare(__right) < 0;
}
-
-template <class charT, class Alloc>
+
+template <class _CharT, class _Alloc>
bool
-operator== (const rope<charT,Alloc> &left, const rope<charT,Alloc> &right) {
- return left.compare(right) == 0;
+operator== (const rope<_CharT,_Alloc>& __left,
+ const rope<_CharT,_Alloc>& __right) {
+ return __left.compare(__right) == 0;
}
-template <class charT, class Alloc>
-inline bool operator== (const __rope_charT_ptr_proxy<charT,Alloc> & x,
- const __rope_charT_ptr_proxy<charT,Alloc> & y) {
- return (x.pos == y.pos && x.root == y.root);
+template <class _CharT, class _Alloc>
+inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
+ const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
+ return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
}
-template<class charT, class Alloc>
-ostream& operator<< (ostream& o, const rope<charT, Alloc>& r);
-
-typedef rope<char, __ALLOC> crope;
-typedef rope<wchar_t, __ALLOC> wrope;
+template<class _CharT, class _Alloc>
+ostream& operator<< (ostream& __o, const rope<_CharT,_Alloc>& __r);
+
+typedef rope<char> crope;
+typedef rope<wchar_t> wrope;
-inline crope::reference __mutable_reference_at(crope& c, size_t i)
+inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
{
- return c.mutable_reference_at(i);
+ return __c.mutable_reference_at(__i);
}
-inline wrope::reference __mutable_reference_at(wrope& c, size_t i)
+inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
{
- return c.mutable_reference_at(i);
+ return __c.mutable_reference_at(__i);
}
#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
-template <class charT, class Alloc>
-inline void swap(rope<charT, Alloc>& x, rope<charT, Alloc>& y) {
- x.swap(y);
+template <class _CharT, class _Alloc>
+inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
+ __x.swap(__y);
}
#else
-inline void swap(crope x, crope y) { x.swap(y); }
-inline void swap(wrope x, wrope y) { x.swap(y); }
+inline void swap(crope __x, crope __y) { __x.swap(__y); }
+inline void swap(wrope __x, wrope __y) { __x.swap(__y); }
#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */
// Hash functions should probably be revisited later:
__STL_TEMPLATE_NULL struct hash<crope>
{
- size_t operator()(const crope& str) const
+ size_t operator()(const crope& __str) const
{
- size_t sz = str.size();
+ size_t __size = __str.size();
- if (0 == sz) return 0;
- return 13*str[0] + 5*str[sz - 1] + sz;
+ if (0 == __size) return 0;
+ return 13*__str[0] + 5*__str[__size - 1] + __size;
}
};
__STL_TEMPLATE_NULL struct hash<wrope>
{
- size_t operator()(const wrope& str) const
+ size_t operator()(const wrope& __str) const
{
- size_t sz = str.size();
+ size_t __size = __str.size();
- if (0 == sz) return 0;
- return 13*str[0] + 5*str[sz - 1] + sz;
+ if (0 == __size) return 0;
+ return 13*__str[0] + 5*__str[__size - 1] + __size;
}
};
@@ -2105,6 +2533,7 @@ __STL_TEMPLATE_NULL struct hash<wrope>
__STL_END_NAMESPACE
# include <ropeimpl.h>
+
# endif /* __SGI_STL_INTERNAL_ROPE_H */
// Local Variables:
OpenPOWER on IntegriCloud