#ifndef _STATIC_MEM_POOL_H
#define _STATIC_MEM_POOL_H

#include <new>
#include <stdexcept>
#include <string>
#include <vector>
#include <assert.h>
#include <stddef.h>
#include "class_level_lock.h"
#include "mem_pool_base.h"

/* Work-around for old MSVC and Borland C++ compilers: fall back to
 * public access where a private section would otherwise be used. */
# if (defined(_MSC_VER) && _MSC_VER < 1300) \
        || (defined(__BORLANDC__) && __BORLANDC__ < 0x600)
#   define __PRIVATE public
# else
#   define __PRIVATE private
# endif

/* Macro for debugging output */
# ifdef _STATIC_MEM_POOL_DEBUG
#   include <iostream>
#   define _STATIC_MEM_POOL_TRACE(_Lck, _Msg) \
        { \
            if (_Lck) { \
                static_mem_pool_set::lock __guard; \
                std::cerr << "static_mem_pool: " << _Msg << std::endl; \
            } else { \
                std::cerr << "static_mem_pool: " << _Msg << std::endl; \
            } \
        }
# else
#   define _STATIC_MEM_POOL_TRACE(_Lck, _Msg) \
        ((void)0)
# endif

/**
 * Singleton class to maintain a set of existing instantiations of
 * static_mem_pool.
 */
class static_mem_pool_set
{
public:
    typedef class_level_lock<static_mem_pool_set>::lock lock;
    static static_mem_pool_set& instance();
    void recycle();
    void add(mem_pool_base* __memory_pool_p);

__PRIVATE:
    ~static_mem_pool_set();
private:
    static_mem_pool_set();

    typedef std::vector<mem_pool_base*> container_type;
    container_type _M_memory_pool_set;

    /* Forbid their use */
    static_mem_pool_set(const static_mem_pool_set&);
    const static_mem_pool_set& operator=(const static_mem_pool_set&);
};
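// Build-time note (an editorial sketch, not part of the original header):
// tracing is off by default.  Defining _STATIC_MEM_POOL_DEBUG before this
// header is included, for example on the compiler command line (the file
// name below is made up),
//
//     g++ -D_STATIC_MEM_POOL_DEBUG -c my_module.cpp
//
// routes the _STATIC_MEM_POOL_TRACE messages above to std::cerr, reporting
// when pools are created, destroyed, and recycled.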
/**
 * Singleton class template to manage the allocation/deallocation of
 * memory blocks of one specific size.
 *
 * @param _Sz   size of memory blocks managed by this pool
 * @param _Gid  group id of the pool: when negative, simultaneous
 *              accesses to this pool are serialized by a lock
 */
template <size_t _Sz, int _Gid = -1>
class static_mem_pool : public mem_pool_base
{
    typedef typename class_level_lock<static_mem_pool<_Sz, _Gid>, (_Gid < 0)>
            ::lock lock;
public:
    /** Gets the instance of the pool, creating it on first use. */
    static static_mem_pool& instance()
    {
        lock __guard;
        if (!_S_instance_p)
        {
            _S_instance_p = _S_create_instance();
        }
        return *_S_instance_p;
    }

    /** Gets the already-created instance of the pool. */
    static static_mem_pool& instance_known()
    {
        assert(_S_instance_p != NULL);
        return *_S_instance_p;
    }

    /** Allocates a block, taking it from the free list when possible
     *  and requesting memory from the system otherwise. */
    void* allocate()
    {
        {
            lock __guard;
            if (_S_memory_block_p)
            {
                void* __result = _S_memory_block_p;
                _S_memory_block_p = _S_memory_block_p->_M_next;
                return __result;
            }
        }
        return _S_alloc_sys(_S_align(_Sz));
    }
    /** Deallocates a block by putting it back on the free list. */
    void deallocate(void* __ptr)
    {
        assert(__ptr != NULL);
        lock __guard;
        _Block_list* __block = reinterpret_cast<_Block_list*>(__ptr);
        __block->_M_next = _S_memory_block_p;
        _S_memory_block_p = __block;
    }

    virtual void recycle();

private:
    static_mem_pool()
    {
        _STATIC_MEM_POOL_TRACE(true, "static_mem_pool<" << _Sz << ','
                                     << _Gid << "> is created");
    }
    ~static_mem_pool()
    {
        // Return all free blocks to the system and mark the pool as
        // destroyed so that later accesses can detect a dead reference.
        _Block_list* __block = _S_memory_block_p;
        while (__block)
        {
            _Block_list* __next = __block->_M_next;
            dealloc_sys(__block);
            __block = __next;
        }
        _S_memory_block_p = NULL;
        _S_instance_p = NULL;
        _S_destroyed = true;
        _STATIC_MEM_POOL_TRACE(false, "static_mem_pool<" << _Sz << ','
                                      << _Gid << "> is destroyed");
    }
    static size_t _S_align(size_t __size)
    {
        return __size >= sizeof(_Block_list) ? __size : sizeof(_Block_list);
    }
    static void* _S_alloc_sys(size_t __size);
    static static_mem_pool* _S_create_instance();

    static bool _S_destroyed;
    static static_mem_pool* _S_instance_p;
    static mem_pool_base::_Block_list* _S_memory_block_p;
};
template <size_t _Sz, int _Gid> bool
        static_mem_pool<_Sz, _Gid>::_S_destroyed = false;
template <size_t _Sz, int _Gid> mem_pool_base::_Block_list*
        static_mem_pool<_Sz, _Gid>::_S_memory_block_p = NULL;
template <size_t _Sz, int _Gid> static_mem_pool<_Sz, _Gid>*
        static_mem_pool<_Sz, _Gid>::_S_instance_p = NULL;
/**
 * Returns about half of the free memory blocks in this pool to the
 * system.  It is called when a system allocation request made by
 * another pool fails.
 */
template <size_t _Sz, int _Gid>
void static_mem_pool<_Sz, _Gid>::recycle()
{
    lock __guard;
    _Block_list* __block = _S_memory_block_p;
    while (__block)
    {
        if (_Block_list* __temp = __block->_M_next)
        {
            // Unlink every other block and give it back to the system.
            _Block_list* __next = __temp->_M_next;
            __block->_M_next = __next;
            dealloc_sys(__temp);
            __block = __next;
        }
        else
        {
            break;
        }
    }
    _STATIC_MEM_POOL_TRACE(false, "static_mem_pool<" << _Sz << ','
                                  << _Gid << "> is recycled");
}
template <size_t _Sz, int _Gid>
void* static_mem_pool<_Sz, _Gid>::_S_alloc_sys(size_t __size)
{
    static_mem_pool_set::lock __guard;
    void* __result = mem_pool_base::alloc_sys(__size);
    if (!__result)
    {
        // Out of memory: ask all pools to recycle their free blocks,
        // then retry once.
        static_mem_pool_set::instance().recycle();
        __result = mem_pool_base::alloc_sys(__size);
    }
    return __result;
}
template <size_t _Sz, int _Gid>
static_mem_pool<_Sz, _Gid>* static_mem_pool<_Sz, _Gid>::_S_create_instance()
{
    if (_S_destroyed)
        throw std::runtime_error("dead reference detected");

    static_mem_pool_set::instance();    // Force its creation
    static_mem_pool* __inst_p = new static_mem_pool();
    try
    {
        static_mem_pool_set::instance().add(__inst_p);
    }
    catch (...)
    {
        _STATIC_MEM_POOL_TRACE(true,
                "Exception occurs in static_mem_pool_set::add");
        delete static_cast<mem_pool_base*>(__inst_p);
        throw;
    }
    return __inst_p;
}
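// Usage sketch (illustrative, not part of the original header): a pool can
// be used directly for fixed-size raw blocks.  The block size 64 and the
// pointer name __p below are made up for the example; instance() is called
// first because instance_known() requires the pool to exist already.
//
//     void* __p = static_mem_pool<64>::instance().allocate();
//     // ... use the block (at least 64 bytes) ...
//     static_mem_pool<64>::instance_known().deallocate(__p);
//
// More commonly, the DECLARE_* macros below inject class-specific operator
// new/delete that route allocations through a pool sized for the class.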
#define DECLARE_STATIC_MEM_POOL(_Cls) \
public: \
    static void* operator new(size_t __size) \
    { \
        assert(__size == sizeof(_Cls)); \
        void* __ptr; \
        __ptr = static_mem_pool<sizeof(_Cls)>:: \
                               instance_known().allocate(); \
        if (__ptr == NULL) \
            throw std::bad_alloc(); \
        return __ptr; \
    } \
    static void operator delete(void* __ptr) \
    { \
        if (__ptr != NULL) \
            static_mem_pool<sizeof(_Cls)>:: \
                           instance_known().deallocate(__ptr); \
    }

#define DECLARE_STATIC_MEM_POOL__NOTHROW(_Cls) \
public: \
    static void* operator new(size_t __size) throw() \
    { \
        assert(__size == sizeof(_Cls)); \
        return static_mem_pool<sizeof(_Cls)>:: \
                              instance_known().allocate(); \
    } \
    static void operator delete(void* __ptr) \
    { \
        if (__ptr != NULL) \
            static_mem_pool<sizeof(_Cls)>:: \
                           instance_known().deallocate(__ptr); \
    }

#define DECLARE_STATIC_MEM_POOL_GROUPED(_Cls, _Gid) \
public: \
    static void* operator new(size_t __size) \
    { \
        assert(__size == sizeof(_Cls)); \
        void* __ptr; \
        __ptr = static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                               instance_known().allocate(); \
        if (__ptr == NULL) \
            throw std::bad_alloc(); \
        return __ptr; \
    } \
    static void operator delete(void* __ptr) \
    { \
        if (__ptr != NULL) \
            static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                           instance_known().deallocate(__ptr); \
    }

#define DECLARE_STATIC_MEM_POOL_GROUPED__NOTHROW(_Cls, _Gid) \
public: \
    static void* operator new(size_t __size) throw() \
    { \
        assert(__size == sizeof(_Cls)); \
        return static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                              instance_known().allocate(); \
    } \
    static void operator delete(void* __ptr) \
    { \
        if (__ptr != NULL) \
            static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                           instance_known().deallocate(__ptr); \
    }

/* Obsolete: no preparation step is needed any more */
#define PREPARE_STATIC_MEM_POOL(_Cls) \
    std::cerr << "PREPARE_STATIC_MEM_POOL is obsolete!\n";

#define PREPARE_STATIC_MEM_POOL_GROUPED(_Cls, _Gid) \
    std::cerr << "PREPARE_STATIC_MEM_POOL_GROUPED is obsolete!\n";

#endif // _STATIC_MEM_POOL_H
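// Usage sketch (illustrative, not part of the original header): a class opts
// into pooled allocation with one of the DECLARE_* macros.  The class name
// "packet" and its member are made up for the example; the explicit
// instance() call is shown because the injected operator new uses
// instance_known(), which requires the pool to have been created.
//
//     #include "static_mem_pool.h"
//
//     class packet
//     {
//     public:
//         packet() {}
//         DECLARE_STATIC_MEM_POOL(packet)
//     private:
//         char _M_data[32];
//     };
//
//     static_mem_pool<sizeof(packet)>::instance();  // ensure the pool exists
//     packet* __pkt = new packet();  // served by static_mem_pool<sizeof(packet)>
//     delete __pkt;                  // block goes back to the pool's free list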