43 #ifndef _POOL_ALLOCATOR_H
44 #define _POOL_ALLOCATOR_H 1
54 namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
56 _GLIBCXX_BEGIN_NAMESPACE_VERSION
80 enum { _S_align = 8 };
81 enum { _S_max_bytes = 128 };
82 enum { _S_free_list_size = (size_t)_S_max_bytes / (
size_t)_S_align };
86 union _Obj* _M_free_list_link;
87 char _M_client_data[1];
90 static _Obj*
volatile _S_free_list[_S_free_list_size];
93 static char* _S_start_free;
94 static char* _S_end_free;
95 static size_t _S_heap_size;
98 _M_round_up(
size_t __bytes)
99 {
return ((__bytes + (
size_t)_S_align - 1) & ~((
size_t)_S_align - 1)); }
101 _GLIBCXX_CONST _Obj*
volatile*
102 _M_get_free_list(
size_t __bytes)
throw ();
105 _M_get_mutex()
throw ();
110 _M_refill(
size_t __n);
115 _M_allocate_chunk(
size_t __n,
int& __nobjs);
123 template<
typename _Tp>
127 static _Atomic_word _S_force_new;
130 typedef size_t size_type;
131 typedef ptrdiff_t difference_type;
132 typedef _Tp* pointer;
133 typedef const _Tp* const_pointer;
134 typedef _Tp& reference;
135 typedef const _Tp& const_reference;
136 typedef _Tp value_type;
138 template<
typename _Tp1>
146 template<
typename _Tp1>
152 address(reference __x)
const {
return std::__addressof(__x); }
155 address(const_reference __x)
const {
return std::__addressof(__x); }
158 max_size()
const throw()
159 {
return size_t(-1) /
sizeof(_Tp); }
164 construct(pointer __p,
const _Tp& __val)
165 { ::new((
void *)__p) _Tp(__val); }
167 #ifdef __GXX_EXPERIMENTAL_CXX0X__
168 template<
typename... _Args>
170 construct(pointer __p, _Args&&... __args)
171 { ::new((
void *)__p) _Tp(std::forward<_Args>(__args)...); }
175 destroy(pointer __p) { __p->~_Tp(); }
178 allocate(size_type __n,
const void* = 0);
181 deallocate(pointer __p, size_type __n);
184 template<
typename _Tp>
189 template<
typename _Tp>
191 operator!=(
const __pool_alloc<_Tp>&,
const __pool_alloc<_Tp>&)
194 template<
typename _Tp>
196 __pool_alloc<_Tp>::_S_force_new;
198 template<
typename _Tp>
200 __pool_alloc<_Tp>::allocate(size_type __n,
const void*)
203 if (__builtin_expect(__n != 0,
true))
205 if (__n > this->max_size())
206 std::__throw_bad_alloc();
211 if (_S_force_new == 0)
213 if (std::getenv(
"GLIBCXX_FORCE_NEW"))
214 __atomic_add_dispatch(&_S_force_new, 1);
216 __atomic_add_dispatch(&_S_force_new, -1);
219 const size_t __bytes = __n *
sizeof(_Tp);
220 if (__bytes >
size_t(_S_max_bytes) || _S_force_new > 0)
221 __ret = static_cast<_Tp*>(::
operator new(__bytes));
224 _Obj*
volatile* __free_list = _M_get_free_list(__bytes);
226 __scoped_lock sentry(_M_get_mutex());
227 _Obj* __restrict__ __result = *__free_list;
228 if (__builtin_expect(__result == 0, 0))
229 __ret =
static_cast<_Tp*
>(_M_refill(_M_round_up(__bytes)));
232 *__free_list = __result->_M_free_list_link;
233 __ret =
reinterpret_cast<_Tp*
>(__result);
236 std::__throw_bad_alloc();
242 template<
typename _Tp>
244 __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
246 if (__builtin_expect(__n != 0 && __p != 0,
true))
248 const size_t __bytes = __n *
sizeof(_Tp);
249 if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new > 0)
250 ::
operator delete(__p);
253 _Obj*
volatile* __free_list = _M_get_free_list(__bytes);
254 _Obj* __q =
reinterpret_cast<_Obj*
>(__p);
256 __scoped_lock sentry(_M_get_mutex());
257 __q ->_M_free_list_link = *__free_list;
263 _GLIBCXX_END_NAMESPACE_VERSION
// Allocator using a memory pool with a single lock.
// __pool_alloc_base is the base class for __pool_alloc.