stl_alloc.h

Source listing of this file (libstdc++-v3 internal allocator implementations).
00001 // Allocators -*- C++ -*-
00002 
00003 // Copyright (C) 2001, 2002 Free Software Foundation, Inc.
00004 //
00005 // This file is part of the GNU ISO C++ Library.  This library is free
00006 // software; you can redistribute it and/or modify it under the
00007 // terms of the GNU General Public License as published by the
00008 // Free Software Foundation; either version 2, or (at your option)
00009 // any later version.
00010 
00011 // This library is distributed in the hope that it will be useful,
00012 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00013 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
00014 // GNU General Public License for more details.
00015 
00016 // You should have received a copy of the GNU General Public License along
00017 // with this library; see the file COPYING.  If not, write to the Free
00018 // Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
00019 // USA.
00020 
00021 // As a special exception, you may use this file as part of a free software
00022 // library without restriction.  Specifically, if other files instantiate
00023 // templates or use macros or inline functions from this file, or you compile
00024 // this file and link it with other files to produce an executable, this
00025 // file does not by itself cause the resulting executable to be covered by
00026 // the GNU General Public License.  This exception does not however
00027 // invalidate any other reasons why the executable file might be covered by
00028 // the GNU General Public License.
00029 
00030 /*
00031  * Copyright (c) 1996-1997
00032  * Silicon Graphics Computer Systems, Inc.
00033  *
00034  * Permission to use, copy, modify, distribute and sell this software
00035  * and its documentation for any purpose is hereby granted without fee,
00036  * provided that the above copyright notice appear in all copies and
00037  * that both that copyright notice and this permission notice appear
00038  * in supporting documentation.  Silicon Graphics makes no
00039  * representations about the suitability of this software for any
00040  * purpose.  It is provided "as is" without express or implied warranty.
00041  */
00042 
00048 #ifndef __GLIBCPP_INTERNAL_ALLOC_H
00049 #define __GLIBCPP_INTERNAL_ALLOC_H
00050 
00081 #include <cstddef>
00082 #include <cstdlib>
00083 #include <cstring>
00084 #include <cassert>
00085 #include <bits/functexcept.h>   // For __throw_bad_alloc
00086 #include <bits/stl_threads.h>
00087 
00088 namespace std
00089 {
00098   class __new_alloc 
00099   {
00100   public:
00101     static void* 
00102     allocate(size_t __n)
00103     { return ::operator new(__n); }
00104     
00105     static void 
00106     deallocate(void* __p, size_t)
00107     { ::operator delete(__p); }
00108   };
00109   
00110 
00121   template <int __inst>
00122     class __malloc_alloc_template 
00123     {
00124     private:
00125       static void* _S_oom_malloc(size_t);
00126       static void* _S_oom_realloc(void*, size_t);
00127       static void (* __malloc_alloc_oom_handler)();
00128       
00129     public:
00130       static void* 
00131       allocate(size_t __n)
00132       {
00133     void* __result = malloc(__n);
00134     if (0 == __result) __result = _S_oom_malloc(__n);
00135     return __result;
00136       }
00137 
00138       static void 
00139       deallocate(void* __p, size_t /* __n */)
00140       { free(__p); }
00141 
00142       static void* 
00143       reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
00144       {
00145     void* __result = realloc(__p, __new_sz);
00146     if (0 == __result) __result = _S_oom_realloc(__p, __new_sz);
00147     return __result;
00148       }
00149       
00150       static void (* __set_malloc_handler(void (*__f)()))()
00151       {
00152     void (* __old)() = __malloc_alloc_oom_handler;
00153     __malloc_alloc_oom_handler = __f;
00154     return(__old);
00155       }
00156     };
00157 
00158   // malloc_alloc out-of-memory handling
00159   template <int __inst>
00160     void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;
00161 
00162   template <int __inst>
00163     void*
00164     __malloc_alloc_template<__inst>::_S_oom_malloc(size_t __n)
00165     {
00166       void (* __my_malloc_handler)();
00167       void* __result;
00168       
00169       for (;;) 
00170     {
00171       __my_malloc_handler = __malloc_alloc_oom_handler;
00172       if (0 == __my_malloc_handler) 
00173         std::__throw_bad_alloc();
00174       (*__my_malloc_handler)();
00175       __result = malloc(__n);
00176       if (__result) 
00177         return(__result);
00178     }
00179     }
00180   
00181   template <int __inst>
00182     void* 
00183     __malloc_alloc_template<__inst>::_S_oom_realloc(void* __p, size_t __n)
00184     { 
00185       void (* __my_malloc_handler)();
00186       void* __result;
00187       
00188       for (;;) 
00189     {
00190       __my_malloc_handler = __malloc_alloc_oom_handler;
00191       if (0 == __my_malloc_handler) 
00192         std::__throw_bad_alloc();
00193       (*__my_malloc_handler)();
00194       __result = realloc(__p, __n);
00195       if (__result) 
00196         return(__result);
00197     }
00198     }
00199 
00200 
00201 // Determines the underlying allocator choice for the node allocator.
00202 #ifdef __USE_MALLOC
00203   typedef __malloc_alloc_template<0>  __mem_interface;
00204 #else
00205   typedef __new_alloc                 __mem_interface;
00206 #endif
00207 
00208 
00219   template<class _Tp, class _Alloc>
00220   class __simple_alloc
00221   {
00222   public:
00223     static _Tp* allocate(size_t __n)
00224     { return 0 == __n ? 0 : (_Tp*) _Alloc::allocate(__n * sizeof (_Tp)); }
00225 
00226     static _Tp* allocate()
00227     { return (_Tp*) _Alloc::allocate(sizeof (_Tp)); }
00228 
00229     static void deallocate(_Tp* __p, size_t __n)
00230     { if (0 != __n) _Alloc::deallocate(__p, __n * sizeof (_Tp)); }
00231 
00232     static void deallocate(_Tp* __p)
00233     { _Alloc::deallocate(__p, sizeof (_Tp)); }
00234   };
00235 
00236 
00251   template <class _Alloc>
00252   class __debug_alloc
00253   {
00254   private:
00255     enum {_S_extra = 8};  // Size of space used to store size.  Note that this
00256                           // must be large enough to preserve alignment.
00257   
00258   public:
00259   
00260     static void* allocate(size_t __n)
00261     {
00262       char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
00263       *(size_t*)__result = __n;
00264       return __result + (int) _S_extra;
00265     }
00266   
00267     static void deallocate(void* __p, size_t __n)
00268     {
00269       char* __real_p = (char*)__p - (int) _S_extra;
00270       assert(*(size_t*)__real_p == __n);
00271       _Alloc::deallocate(__real_p, __n + (int) _S_extra);
00272     }
00273   
00274     static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz)
00275     {
00276       char* __real_p = (char*)__p - (int) _S_extra;
00277       assert(*(size_t*)__real_p == __old_sz);
00278       char* __result = (char*)
00279         _Alloc::reallocate(__real_p, __old_sz + (int) _S_extra,
00280                                      __new_sz + (int) _S_extra);
00281       *(size_t*)__result = __new_sz;
00282       return __result + (int) _S_extra;
00283     }
00284   };
00285 
00286 
00287 #ifdef __USE_MALLOC
00288 
00289 typedef __mem_interface __alloc;
00290 typedef __mem_interface __single_client_alloc;
00291 
00292 #else
00293 
00294 
00324 template<bool __threads, int __inst>
00325   class __default_alloc_template
00326   {
00327   private:
00328     enum {_ALIGN = 8};
00329     enum {_MAX_BYTES = 128};
00330     enum {_NFREELISTS = _MAX_BYTES / _ALIGN};
00331     
00332     union _Obj 
00333     {
00334       union _Obj* _M_free_list_link;
00335       char        _M_client_data[1];    // The client sees this.
00336     };
00337 
00338     static _Obj* volatile   _S_free_list[_NFREELISTS]; 
00339 
00340     // Chunk allocation state.
00341     static char*        _S_start_free;
00342     static char*        _S_end_free;
00343     static size_t       _S_heap_size;
00344     
00345     static _STL_mutex_lock  _S_node_allocator_lock;
00346 
00347     static size_t
00348     _S_round_up(size_t __bytes) 
00349     { return (((__bytes) + (size_t) _ALIGN-1) & ~((size_t) _ALIGN - 1)); }
00350 
00351     static size_t 
00352     _S_freelist_index(size_t __bytes)
00353     { return (((__bytes) + (size_t)_ALIGN-1)/(size_t)_ALIGN - 1); }
00354 
00355     // Returns an object of size __n, and optionally adds to size __n
00356     // free list.
00357     static void* 
00358     _S_refill(size_t __n);
00359 
00360     // Allocates a chunk for nobjs of size size.  nobjs may be reduced
00361     // if it is inconvenient to allocate the requested number.
00362     static char* 
00363     _S_chunk_alloc(size_t __size, int& __nobjs);
00364     
00365     // It would be nice to use _STL_auto_lock here.  But we need a
00366     // test whether threads are in use.
00367     class _Lock 
00368     {
00369     public:
00370       _Lock() { if (__threads) _S_node_allocator_lock._M_acquire_lock(); }
00371       ~_Lock() { if (__threads) _S_node_allocator_lock._M_release_lock(); }
00372     } __attribute__ ((__unused__));
00373     friend class _Lock;
00374     
00375   public:
00376     // __n must be > 0
00377     static void* 
00378     allocate(size_t __n)
00379     {
00380       void* __ret = 0;
00381       
00382       if (__n > (size_t) _MAX_BYTES) 
00383     __ret = __mem_interface::allocate(__n);
00384       else 
00385     {
00386       _Obj* volatile* __my_free_list = _S_free_list 
00387         + _S_freelist_index(__n);
00388       // Acquire the lock here with a constructor call.  This
00389       // ensures that it is released in exit or during stack
00390       // unwinding.
00391       _Lock __lock_instance;
00392       _Obj* __restrict__ __result = *__my_free_list;
00393       if (__result == 0)
00394         __ret = _S_refill(_S_round_up(__n));
00395       else 
00396         {
00397           *__my_free_list = __result -> _M_free_list_link;
00398           __ret = __result;
00399         }
00400     }
00401       return __ret;
00402     };
00403 
00404     // __p may not be 0
00405     static void 
00406     deallocate(void* __p, size_t __n)
00407     {
00408       if (__n > (size_t) _MAX_BYTES)
00409     __mem_interface::deallocate(__p, __n);
00410       else 
00411     {
00412       _Obj* volatile*  __my_free_list
00413         = _S_free_list + _S_freelist_index(__n);
00414       _Obj* __q = (_Obj*)__p;
00415       
00416       // Acquire the lock here with a constructor call.  This ensures that
00417       // it is released in exit or during stack unwinding.
00418       _Lock __lock_instance;
00419       __q -> _M_free_list_link = *__my_free_list;
00420       *__my_free_list = __q;
00421     }
00422     }
00423     
00424     static void* 
00425     reallocate(void* __p, size_t __old_sz, size_t __new_sz);
00426   };
00427 
00428 
00429   template<bool __threads, int __inst>
00430     inline bool 
00431     operator==(const __default_alloc_template<__threads, __inst>&,
00432            const __default_alloc_template<__threads, __inst>&)
00433     { return true; }
00434 
00435   template<bool __threads, int __inst>
00436     inline bool 
00437     operator!=(const __default_alloc_template<__threads, __inst>&,
00438            const __default_alloc_template<__threads, __inst>&)
00439     { return false; }
00440 
00441 
00442   // We allocate memory in large chunks in order to avoid fragmenting the
00443   // malloc heap (or whatever __mem_interface is using) too much.  We assume
00444   // that __size is properly aligned.  We hold the allocation lock.
00445   template<bool __threads, int __inst>
00446     char*
00447     __default_alloc_template<__threads, __inst>::_S_chunk_alloc(size_t __size, 
00448                                 int& __nobjs)
00449     {
00450       char* __result;
00451       size_t __total_bytes = __size * __nobjs;
00452       size_t __bytes_left = _S_end_free - _S_start_free;
00453       
00454       if (__bytes_left >= __total_bytes) 
00455       {
00456         __result = _S_start_free;
00457         _S_start_free += __total_bytes;
00458         return(__result);
00459       } 
00460       else if (__bytes_left >= __size) 
00461     {
00462       __nobjs = (int)(__bytes_left/__size);
00463       __total_bytes = __size * __nobjs;
00464       __result = _S_start_free;
00465       _S_start_free += __total_bytes;
00466       return(__result);
00467     } 
00468       else 
00469     {
00470       size_t __bytes_to_get = 
00471         2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
00472       // Try to make use of the left-over piece.
00473       if (__bytes_left > 0) 
00474         {
00475           _Obj* volatile* __my_free_list =
00476         _S_free_list + _S_freelist_index(__bytes_left);
00477           
00478           ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
00479           *__my_free_list = (_Obj*)_S_start_free;
00480         }
00481       _S_start_free = (char*) __mem_interface::allocate(__bytes_to_get);
00482       if (0 == _S_start_free) 
00483         {
00484           size_t __i;
00485           _Obj* volatile* __my_free_list;
00486           _Obj* __p;
00487           // Try to make do with what we have.  That can't hurt.  We
00488           // do not try smaller requests, since that tends to result
00489           // in disaster on multi-process machines.
00490           __i = __size;
00491           for (; __i <= (size_t) _MAX_BYTES; __i += (size_t) _ALIGN) 
00492         {
00493           __my_free_list = _S_free_list + _S_freelist_index(__i);
00494           __p = *__my_free_list;
00495           if (0 != __p) 
00496             {
00497               *__my_free_list = __p -> _M_free_list_link;
00498               _S_start_free = (char*)__p;
00499               _S_end_free = _S_start_free + __i;
00500               return(_S_chunk_alloc(__size, __nobjs));
00501               // Any leftover piece will eventually make it to the
00502               // right free list.
00503             }
00504         }
00505           _S_end_free = 0;  // In case of exception.
00506           _S_start_free = (char*)__mem_interface::allocate(__bytes_to_get);
00507           // This should either throw an exception or remedy the situation.
00508           // Thus we assume it succeeded.
00509         }
00510       _S_heap_size += __bytes_to_get;
00511       _S_end_free = _S_start_free + __bytes_to_get;
00512       return(_S_chunk_alloc(__size, __nobjs));
00513     }
00514     }
00515   
00516   
00517   // Returns an object of size __n, and optionally adds to "size
00518   // __n"'s free list.  We assume that __n is properly aligned.  We
00519   // hold the allocation lock.
00520   template<bool __threads, int __inst>
00521     void*
00522     __default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
00523     {
00524       int __nobjs = 20;
00525       char* __chunk = _S_chunk_alloc(__n, __nobjs);
00526       _Obj* volatile* __my_free_list;
00527       _Obj* __result;
00528       _Obj* __current_obj;
00529       _Obj* __next_obj;
00530       int __i;
00531       
00532       if (1 == __nobjs) return(__chunk);
00533       __my_free_list = _S_free_list + _S_freelist_index(__n);
00534       
00535       /* Build free list in chunk */
00536       __result = (_Obj*)__chunk;
00537       *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
00538       for (__i = 1; ; __i++) {
00539         __current_obj = __next_obj;
00540         __next_obj = (_Obj*)((char*)__next_obj + __n);
00541         if (__nobjs - 1 == __i) {
00542       __current_obj -> _M_free_list_link = 0;
00543       break;
00544         } else {
00545       __current_obj -> _M_free_list_link = __next_obj;
00546         }
00547       }
00548       return(__result);
00549     }
00550 
00551 
00552   template<bool threads, int inst>
00553     void*
00554     __default_alloc_template<threads, inst>::reallocate(void* __p, 
00555                             size_t __old_sz,
00556                             size_t __new_sz)
00557     {
00558       void* __result;
00559       size_t __copy_sz;
00560       
00561       if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES) {
00562         return(realloc(__p, __new_sz));
00563       }
00564       if (_S_round_up(__old_sz) == _S_round_up(__new_sz)) return(__p);
00565       __result = allocate(__new_sz);
00566       __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
00567       memcpy(__result, __p, __copy_sz);
00568       deallocate(__p, __old_sz);
00569       return(__result);
00570     }
00571   
00572   template<bool __threads, int __inst>
00573   _STL_mutex_lock
00574   __default_alloc_template<__threads, __inst>::_S_node_allocator_lock
00575   __STL_MUTEX_INITIALIZER;
00576   
00577   template<bool __threads, int __inst>
00578   char* __default_alloc_template<__threads, __inst>::_S_start_free = 0;
00579   
00580   template<bool __threads, int __inst>
00581   char* __default_alloc_template<__threads, __inst>::_S_end_free = 0;
00582   
00583   template<bool __threads, int __inst>
00584   size_t __default_alloc_template<__threads, __inst>::_S_heap_size = 0;
00585   
00586   template<bool __threads, int __inst>
00587   typename __default_alloc_template<__threads, __inst>::_Obj* volatile
00588   __default_alloc_template<__threads, __inst>::_S_free_list[_NFREELISTS];
00589   
00590   typedef __default_alloc_template<true, 0>    __alloc;
00591   typedef __default_alloc_template<false, 0>   __single_client_alloc;
00592 
00593 
00594 #endif /* ! __USE_MALLOC */
00595 
00596 
00613 template <class _Tp>
00614 class allocator
00615 {
00616   typedef __alloc _Alloc;          // The underlying allocator.
00617 public:
00618   typedef size_t     size_type;
00619   typedef ptrdiff_t  difference_type;
00620   typedef _Tp*       pointer;
00621   typedef const _Tp* const_pointer;
00622   typedef _Tp&       reference;
00623   typedef const _Tp& const_reference;
00624   typedef _Tp        value_type;
00625 
00626   template <class _Tp1> struct rebind {
00627     typedef allocator<_Tp1> other;
00628   };
00629 
00630   allocator() throw() {}
00631   allocator(const allocator&) throw() {}
00632   template <class _Tp1> allocator(const allocator<_Tp1>&) throw() {}
00633   ~allocator() throw() {}
00634 
00635   pointer address(reference __x) const { return &__x; }
00636   const_pointer address(const_reference __x) const { return &__x; }
00637 
00638   // __n is permitted to be 0.  The C++ standard says nothing about what
00639   // the return value is when __n == 0.
00640   _Tp* allocate(size_type __n, const void* = 0) {
00641     return __n != 0 ? static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp))) 
00642                     : 0;
00643   }
00644 
00645   // __p is not permitted to be a null pointer.
00646   void deallocate(pointer __p, size_type __n)
00647     { _Alloc::deallocate(__p, __n * sizeof(_Tp)); }
00648 
00649   size_type max_size() const throw() 
00650     { return size_t(-1) / sizeof(_Tp); }
00651 
00652   void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
00653   void destroy(pointer __p) { __p->~_Tp(); }
00654 };
00655 
00656 template<>
00657 class allocator<void> {
00658 public:
00659   typedef size_t      size_type;
00660   typedef ptrdiff_t   difference_type;
00661   typedef void*       pointer;
00662   typedef const void* const_pointer;
00663   typedef void        value_type;
00664 
00665   template <class _Tp1> struct rebind {
00666     typedef allocator<_Tp1> other;
00667   };
00668 };
00669 
00670 
00671 template <class _T1, class _T2>
00672 inline bool operator==(const allocator<_T1>&, const allocator<_T2>&) 
00673 {
00674   return true;
00675 }
00676 
00677 template <class _T1, class _T2>
00678 inline bool operator!=(const allocator<_T1>&, const allocator<_T2>&)
00679 {
00680   return false;
00681 }
00682 
00683 
/**
 *  @brief  Adaptor turning an SGI-style allocator into a
 *          standard-conforming allocator for type _Tp.
 *
 *  Unlike allocator<>, the underlying allocator is held as a data
 *  member and called through an instance, so __allocator can carry
 *  per-instance state.
 */
template <class _Tp, class _Alloc>
struct __allocator
{
  _Alloc __underlying_alloc;   // The forwarded-to allocator instance.

  typedef size_t    size_type;
  typedef ptrdiff_t difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  // Rebind to an adaptor for a different value type over the same _Alloc.
  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };

  __allocator() throw() {}
  __allocator(const __allocator& __a) throw()
    : __underlying_alloc(__a.__underlying_alloc) {}
  template <class _Tp1>
  __allocator(const __allocator<_Tp1, _Alloc>& __a) throw()
    : __underlying_alloc(__a.__underlying_alloc) {}
  ~__allocator() throw() {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0 (and yields a null pointer).
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0
        ? static_cast<_Tp*>(__underlying_alloc.allocate(__n * sizeof(_Tp)))
        : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }

  // Largest count that can meaningfully be passed to allocate().
  size_type max_size() const throw()
    { return size_t(-1) / sizeof(_Tp); }

  // Construct/destroy a _Tp in already-allocated storage.
  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};
00740 
00741 template <class _Alloc>
00742 class __allocator<void, _Alloc> {
00743   typedef size_t      size_type;
00744   typedef ptrdiff_t   difference_type;
00745   typedef void*       pointer;
00746   typedef const void* const_pointer;
00747   typedef void        value_type;
00748 
00749   template <class _Tp1> struct rebind {
00750     typedef __allocator<_Tp1, _Alloc> other;
00751   };
00752 };
00753 
00754 template <class _Tp, class _Alloc>
00755 inline bool operator==(const __allocator<_Tp, _Alloc>& __a1,
00756                        const __allocator<_Tp, _Alloc>& __a2)
00757 {
00758   return __a1.__underlying_alloc == __a2.__underlying_alloc;
00759 }
00760 
00761 template <class _Tp, class _Alloc>
00762 inline bool operator!=(const __allocator<_Tp, _Alloc>& __a1,
00763                        const __allocator<_Tp, _Alloc>& __a2)
00764 {
00765   return __a1.__underlying_alloc != __a2.__underlying_alloc;
00766 }
00767 
00768 
00770 
00774 template <int inst>
00775 inline bool operator==(const __malloc_alloc_template<inst>&,
00776                        const __malloc_alloc_template<inst>&)
00777 {
00778   return true;
00779 }
00780 
00781 template <int __inst>
00782 inline bool operator!=(const __malloc_alloc_template<__inst>&,
00783                        const __malloc_alloc_template<__inst>&)
00784 {
00785   return false;
00786 }
00787 
00788 template <class _Alloc>
00789 inline bool operator==(const __debug_alloc<_Alloc>&,
00790                        const __debug_alloc<_Alloc>&) {
00791   return true;
00792 }
00793 
00794 template <class _Alloc>
00795 inline bool operator!=(const __debug_alloc<_Alloc>&,
00796                        const __debug_alloc<_Alloc>&) {
00797   return false;
00798 }
00800 
00801 
// The fully general version of _Alloc_traits: maps an arbitrary
// standard-conforming allocator to the allocator type for _Tp via
// rebind.  "Instanceless" allocators (detected by the specializations
// below) let containers avoid storing an allocator object.
template <class _Tp, class _Allocator>
struct _Alloc_traits
{
  static const bool _S_instanceless = false;
  typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
};

// Out-of-line definition for the static member (required pre-C++17).
template <class _Tp, class _Allocator>
const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;
00849 
00851 template <class _Tp, class _Tp1>
00852 struct _Alloc_traits<_Tp, allocator<_Tp1> >
00853 {
00854   static const bool _S_instanceless = true;
00855   typedef __simple_alloc<_Tp, __alloc> _Alloc_type;
00856   typedef allocator<_Tp> allocator_type;
00857 };
00859 
00861 
00862 template <class _Tp, int __inst>
00863 struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
00864 {
00865   static const bool _S_instanceless = true;
00866   typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
00867   typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
00868 };
00869 
00870 #ifndef __USE_MALLOC
00871 template <class _Tp, bool __threads, int __inst>
00872 struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
00873 {
00874   static const bool _S_instanceless = true;
00875   typedef __simple_alloc<_Tp, __default_alloc_template<__threads, __inst> > 
00876           _Alloc_type;
00877   typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> > 
00878           allocator_type;
00879 };
00880 #endif
00881 
00882 template <class _Tp, class _Alloc>
00883 struct _Alloc_traits<_Tp, __debug_alloc<_Alloc> >
00884 {
00885   static const bool _S_instanceless = true;
00886   typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
00887   typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
00888 };
00890 
00892 
00893 template <class _Tp, class _Tp1, int __inst>
00894 struct _Alloc_traits<_Tp, 
00895                      __allocator<_Tp1, __malloc_alloc_template<__inst> > >
00896 {
00897   static const bool _S_instanceless = true;
00898   typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
00899   typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
00900 };
00901 
00902 #ifndef __USE_MALLOC
00903 template <class _Tp, class _Tp1, bool __thr, int __inst>
00904 struct _Alloc_traits<_Tp, 
00905                       __allocator<_Tp1, 
00906                                   __default_alloc_template<__thr, __inst> > >
00907 {
00908   static const bool _S_instanceless = true;
00909   typedef __simple_alloc<_Tp, __default_alloc_template<__thr,__inst> > 
00910           _Alloc_type;
00911   typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> > 
00912           allocator_type;
00913 };
00914 #endif
00915 
00916 template <class _Tp, class _Tp1, class _Alloc>
00917 struct _Alloc_traits<_Tp, __allocator<_Tp1, __debug_alloc<_Alloc> > >
00918 {
00919   static const bool _S_instanceless = true;
00920   typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
00921   typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
00922 };
00924 
00925   // Inhibit implicit instantiations for required instantiations,
00926   // which are defined via explicit instantiations elsewhere.  
00927   // NB: This syntax is a GNU extension.
00928   extern template class allocator<char>;
00929   extern template class allocator<wchar_t>;
00930 #ifdef __USE_MALLOC
00931   extern template class __malloc_alloc_template<0>;
00932 #else
00933   extern template class __default_alloc_template<true, 0>;
00934 #endif
00935 } // namespace std
00936 
00937 #endif /* __GLIBCPP_INTERNAL_ALLOC_H */
00938 
00939 // Local Variables:
00940 // mode:C++
00941 // End:

Generated on Wed May 1 19:19:35 2002 for libstdc++-v3 Source by doxygen 1.2.15