00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00032
00033
00034
00035
#ifndef _MT_ALLOCATOR_H
00036
#define _MT_ALLOCATOR_H 1
00037
00038
#include <new>
00039
#include <cstdlib>
00040
#include <bits/functexcept.h>
00041
#include <bits/gthr.h>
00042
#include <bits/atomicity.h>
00043
00044
namespace __gnu_cxx
00045 {
00046
00047
00048
00049
00050
00051
00052
00053
00054
00055
00056
template<typename _Tp>
  class __mt_alloc
  {
  public:
    // Standard allocator typedefs.
    typedef size_t     size_type;
    typedef ptrdiff_t  difference_type;
    typedef _Tp*       pointer;
    typedef const _Tp* const_pointer;
    typedef _Tp&       reference;
    typedef const _Tp& const_reference;
    typedef _Tp        value_type;

    template<typename _Tp1>
      struct rebind
      { typedef __mt_alloc<_Tp1> other; };

    // The allocator itself is stateless: all bookkeeping lives in the
    // static members below, so construction/copy/conversion are no-ops.
    __mt_alloc() throw() { }

    __mt_alloc(const __mt_alloc&) throw() { }

    template<typename _Tp1>
      __mt_alloc(const __mt_alloc<_Tp1>& obj) throw() { }

    ~__mt_alloc() throw() { }

    pointer
    address(reference __x) const
    { return &__x; }

    const_pointer
    address(const_reference __x) const
    { return &__x; }

    // Largest element count representable in size_type.
    size_type
    max_size() const throw()
    { return size_t(-1) / sizeof(_Tp); }

    // Placement-construct a copy of __val at __p.
    void
    construct(pointer __p, const _Tp& __val)
    { ::new(__p) _Tp(__val); }

    void
    destroy(pointer __p) { __p->~_Tp(); }

    // Defined out of line below.
    pointer
    allocate(size_type __n, const void* = 0);

    void
    deallocate(pointer __p, size_type __n);

    // Runtime tuning parameters for the allocator.  May be replaced via
    // _S_set_options(), but only before the first allocation initializes
    // the static state (see _S_set_options).
    struct _Tune
    {
      // Alignment overhead prepended to every block; the pointer handed
      // to the user is block + _M_align (see allocate/deallocate).
      size_t _M_align;

      // Requests larger than this (in bytes) bypass the pools and go
      // straight to ::operator new/delete.
      size_t _M_max_bytes;

      // Size in bytes of the smallest bin; bin __w holds blocks of
      // (_M_min_bin << __w) + _M_align bytes.
      size_t _M_min_bin;

      // Bytes requested from ::operator new each time a bin's freelist
      // needs refilling; carved into _M_chunk_size / bin_size blocks.
      size_t _M_chunk_size;

      // Number of per-thread freelist slots created by _S_initialize
      // (slot 0 is reserved for the global/single-threaded list).
      size_t _M_max_threads;

      // Multiplier used by deallocate's heuristic that decides when a
      // thread's freelist has grown enough to return blocks to the
      // global list.
      size_t _M_freelist_headroom;

      // When true (or when GLIBCXX_FORCE_NEW is set in the environment),
      // every request goes to ::operator new/delete.
      bool _M_force_new;

      explicit
      _Tune()
      : _M_align(8), _M_max_bytes(128), _M_min_bin(8),
      _M_chunk_size(4096 - 4 * sizeof(void*)),
      _M_max_threads(4096), _M_freelist_headroom(10),
      _M_force_new(getenv("GLIBCXX_FORCE_NEW") ? true : false)
      { }

      explicit
      _Tune(size_t __align, size_t __maxb, size_t __minbin,
            size_t __chunk, size_t __maxthreads, size_t __headroom,
            bool __force)
      : _M_align(__align), _M_max_bytes(__maxb), _M_min_bin(__minbin),
      _M_chunk_size(__chunk), _M_max_threads(__maxthreads),
      _M_freelist_headroom(__headroom), _M_force_new(__force)
      { }
    };

    static const _Tune
    _S_get_options()
    { return _S_options; }

    // Replace the tuning parameters; ignored once _S_initialize has run.
    static void
    _S_set_options(_Tune __t)
    {
      if (!_S_init)
        _S_options = __t;
    }

  private:
    // One-time initialization guard (see allocate()).
#ifdef __GTHREADS
    static __gthread_once_t _S_once;
#endif
    static bool _S_init;

    static void
    _S_initialize();

    // Active tuning parameters.
    static _Tune _S_options;

    // Maps a request size in bytes [0 .. _M_max_bytes] to its bin index.
    typedef unsigned short int _Binmap_type;
    static _Binmap_type* _S_binmap;

#ifdef __GTHREADS
    // Node of the freelist that recycles small integer thread ids;
    // handed out by _S_get_thread_id and returned by
    // _S_destroy_thread_key when a thread exits.
    struct _Thread_record
    {
      // Next unused record.
      _Thread_record* volatile _M_next;

      // Id assigned to the thread holding this record (1-based;
      // 0 means "no threads"/global).
      size_t _M_id;
    };

    static _Thread_record* volatile _S_thread_freelist_first;
    static __gthread_mutex_t _S_thread_freelist_mutex;
    static __gthread_key_t _S_thread_key;

    // TSD destructor: returns the thread's id record to the freelist.
    static void
    _S_destroy_thread_key(void* __freelist_pos);
#endif

    static size_t
    _S_get_thread_id();

    // A free block is a link in a singly-linked freelist; an allocated
    // block (threaded builds) instead records the owning thread's id.
    union _Block_record
    {
      // When free: next block in this freelist.
      _Block_record* volatile _M_next;

#ifdef __GTHREADS
      // When allocated: id of the thread that allocated it.
      size_t _M_thread_id;
#endif
    };

    // Per-bin bookkeeping; each array below is indexed by thread id,
    // with index 0 serving as the global (or single-threaded) list.
    struct _Bin_record
    {
      // Head of each thread's freelist of blocks.
      _Block_record** volatile _M_first;

#ifdef __GTHREADS
      // Per-thread counts of free and in-use blocks, used by
      // deallocate's headroom heuristic.
      size_t* volatile _M_free;
      size_t* volatile _M_used;

      // Protects the global (index 0) list of this bin.
      __gthread_mutex_t* _M_mutex;
#endif
    };

    // Array of _S_bin_size bin records.
    static _Bin_record* volatile _S_bin;

    // Number of bins; computed in _S_initialize from the tuning options.
    static size_t _S_bin_size;
  };
00280
00281
template<
typename _Tp>
00282
typename __mt_alloc<_Tp>::pointer
00283
__mt_alloc<_Tp>::
00284
allocate(size_type __n,
const void*)
00285 {
00286
00287
00288
00289
00290
00291
if (!_S_init)
00292 {
00293
#ifdef __GTHREADS
00294
if (__gthread_active_p())
00295 __gthread_once(&_S_once, _S_initialize);
00296
#endif
00297
if (!_S_init)
00298 _S_initialize();
00299 }
00300
00301
00302
00303
const size_t __bytes = __n *
sizeof(_Tp);
00304
if (__bytes > _S_options._M_max_bytes || _S_options._M_force_new)
00305 {
00306
void* __ret = ::operator
new(__bytes);
00307
return static_cast<_Tp*>(__ret);
00308 }
00309
00310
00311
const size_t __which = _S_binmap[__bytes];
00312
const size_t __thread_id = _S_get_thread_id();
00313
00314
00315
00316
const _Bin_record& __bin = _S_bin[__which];
00317 _Block_record* __block = NULL;
00318
if (__bin._M_first[__thread_id] == NULL)
00319 {
00320
00321
00322
const size_t __bin_size = ((_S_options._M_min_bin << __which)
00323 + _S_options._M_align);
00324 size_t __block_count = _S_options._M_chunk_size / __bin_size;
00325
00326
00327
00328
00329
00330
00331
00332
00333
00334
00335
00336
#ifdef __GTHREADS
00337
if (__gthread_active_p())
00338 {
00339 __gthread_mutex_lock(__bin._M_mutex);
00340
if (__bin._M_first[0] == NULL)
00341 {
00342
00343
00344 __gthread_mutex_unlock(__bin._M_mutex);
00345
00346
void* __v = ::operator
new(_S_options._M_chunk_size);
00347 __bin._M_first[__thread_id] = static_cast<_Block_record*>(__v);
00348 __bin._M_free[__thread_id] = __block_count;
00349
00350 --__block_count;
00351 __block = __bin._M_first[__thread_id];
00352
while (__block_count-- > 0)
00353 {
00354
char* __c = reinterpret_cast<char*>(__block) + __bin_size;
00355 __block->_M_next = reinterpret_cast<_Block_record*>(__c);
00356 __block = __block->_M_next;
00357 }
00358 __block->_M_next = NULL;
00359 }
00360
else
00361 {
00362
00363
00364
00365 __bin._M_first[__thread_id] = __bin._M_first[0];
00366
if (__block_count >= __bin._M_free[0])
00367 {
00368 __bin._M_free[__thread_id] = __bin._M_free[0];
00369 __bin._M_free[0] = 0;
00370 __bin._M_first[0] = NULL;
00371 }
00372
else
00373 {
00374 __bin._M_free[__thread_id] = __block_count;
00375 __bin._M_free[0] -= __block_count;
00376 --__block_count;
00377 __block = __bin._M_first[0];
00378
while (__block_count-- > 0)
00379 __block = __block->_M_next;
00380 __bin._M_first[0] = __block->_M_next;
00381 __block->_M_next = NULL;
00382 }
00383 __gthread_mutex_unlock(__bin._M_mutex);
00384 }
00385 }
00386
else
00387
#endif
00388
{
00389
void* __v = ::operator
new(_S_options._M_chunk_size);
00390 __bin._M_first[0] = static_cast<_Block_record*>(__v);
00391
00392 --__block_count;
00393 __block = __bin._M_first[0];
00394
while (__block_count-- > 0)
00395 {
00396
char* __c = reinterpret_cast<char*>(__block) + __bin_size;
00397 __block->_M_next = reinterpret_cast<_Block_record*>(__c);
00398 __block = __block->_M_next;
00399 }
00400 __block->_M_next = NULL;
00401 }
00402 }
00403
00404 __block = __bin._M_first[__thread_id];
00405 __bin._M_first[__thread_id] = __bin._M_first[__thread_id]->_M_next;
00406
#ifdef __GTHREADS
00407
if (__gthread_active_p())
00408 {
00409 __block->_M_thread_id = __thread_id;
00410 --__bin._M_free[__thread_id];
00411 ++__bin._M_used[__thread_id];
00412 }
00413
#endif
00414
00415
char* __c = reinterpret_cast<char*>(__block) + _S_options._M_align;
00416
return static_cast<_Tp*>(static_cast<void*>(__c));
00417 }
00418
00419
template<typename _Tp>
  void
  __mt_alloc<_Tp>::
  deallocate(pointer __p, size_type __n)
  {
    // Requests that bypassed the pools in allocate() (too large, or
    // force-new mode) are returned straight to ::operator delete.
    const size_t __bytes = __n * sizeof(_Tp);
    if (__bytes > _S_options._M_max_bytes || _S_options._M_force_new)
      {
        ::operator delete(__p);
        return;
      }

    // Locate the bin this block came from.
    const size_t __which = _S_binmap[__bytes];
    const _Bin_record& __bin = _S_bin[__which];

    // Recover the block header, which sits _M_align bytes before the
    // pointer handed out by allocate().
    char* __c = reinterpret_cast<char*>(__p) - _S_options._M_align;
    _Block_record* __block = reinterpret_cast<_Block_record*>(__c);

#ifdef __GTHREADS
    if (__gthread_active_p())
      {
        const size_t __thread_id = _S_get_thread_id();

        // Headroom heuristic: if this thread's free count, scaled by
        // _M_freelist_headroom, exceeds its in-use count by enough,
        // move a batch of blocks back to the global (index 0) list so
        // other threads can reuse them.
        long __remove = ((__bin._M_free[__thread_id]
                          * _S_options._M_freelist_headroom)
                         - __bin._M_used[__thread_id]);
        if (__remove > static_cast<long>(100 * (_S_bin_size - __which)
                                         * _S_options._M_freelist_headroom)
            && __remove > static_cast<long>(__bin._M_free[__thread_id]))
          {
            // Detach the first __removed blocks from this thread's
            // list (no lock needed: only this thread touches its slot).
            _Block_record* __tmp = __bin._M_first[__thread_id];
            _Block_record* __first = __tmp;
            __remove /= _S_options._M_freelist_headroom;
            const long __removed = __remove;
            --__remove;
            while (__remove-- > 0)
              __tmp = __tmp->_M_next;
            __bin._M_first[__thread_id] = __tmp->_M_next;
            __bin._M_free[__thread_id] -= __removed;

            // Splice the detached run onto the global list under lock.
            __gthread_mutex_lock(__bin._M_mutex);
            __tmp->_M_next = __bin._M_first[0];
            __bin._M_first[0] = __first;
            __bin._M_free[0] += __removed;
            __gthread_mutex_unlock(__bin._M_mutex);
          }

        // Credit the thread that allocated the block (recorded in the
        // header by allocate), which may differ from the current thread.
        --__bin._M_used[__block->_M_thread_id];

        // Push the block onto the current thread's freelist.
        __block->_M_next = __bin._M_first[__thread_id];
        __bin._M_first[__thread_id] = __block;

        ++__bin._M_free[__thread_id];
      }
    else
#endif
      {
        // Single-threaded: push onto the global list, no bookkeeping.
        __block->_M_next = __bin._M_first[0];
        __bin._M_first[0] = __block;
      }
  }
00489
00490
template<typename _Tp>
  void
  __mt_alloc<_Tp>::
  _S_initialize()
  {
    // One-time setup of the static pool state; reached via
    // __gthread_once or the !_S_init fallback in allocate().

    // An all-zero _M_align indicates _S_options was never constructed
    // (e.g. zero-initialized static storage); placement-new restores
    // the defaults.
    if (_S_options._M_align == 0)
      new (&_S_options) _Tune;

    // In force-new mode no pools are needed at all.
    if (_S_options._M_force_new)
      {
        _S_init = true;
        return;
      }

    // Count the bins: doubling from _M_min_bin up to _M_max_bytes.
    // Note _S_bin_size starts at 1 (see its definition below).
    size_t __bin_size = _S_options._M_min_bin;
    while (_S_options._M_max_bytes > __bin_size)
      {
        __bin_size <<= 1;
        ++_S_bin_size;
      }

    // Build the byte-size -> bin-index lookup table, one entry per
    // request size in [0, _M_max_bytes].
    const size_t __j = (_S_options._M_max_bytes + 1) * sizeof(_Binmap_type);
    _S_binmap = static_cast<_Binmap_type*>(::operator new(__j));

    _Binmap_type* __bp = _S_binmap;
    _Binmap_type __bin_max = _S_options._M_min_bin;
    _Binmap_type __bint = 0;
    for (_Binmap_type __ct = 0; __ct <= _S_options._M_max_bytes; ++__ct)
      {
        if (__ct > __bin_max)
          {
            __bin_max <<= 1;
            ++__bint;
          }
        *__bp++ = __bint;
      }

    // Allocate the per-bin records.
    void* __v = ::operator new(sizeof(_Bin_record) * _S_bin_size);
    _S_bin = static_cast<_Bin_record*>(__v);

#ifdef __GTHREADS
    if (__gthread_active_p())
      {
        // Build the freelist of reusable thread ids 1.._M_max_threads
        // (id 0 is reserved for the global slot).
        const size_t __k = sizeof(_Thread_record) * _S_options._M_max_threads;
        __v = ::operator new(__k);
        _S_thread_freelist_first = static_cast<_Thread_record*>(__v);

        size_t __i;
        for (__i = 1; __i < _S_options._M_max_threads; ++__i)
          {
            _Thread_record& __tr = _S_thread_freelist_first[__i - 1];
            __tr._M_next = &_S_thread_freelist_first[__i];
            __tr._M_id = __i;
          }

        // Terminate the list; last record gets id _M_max_threads.
        _S_thread_freelist_first[__i - 1]._M_next = NULL;
        _S_thread_freelist_first[__i - 1]._M_id = __i;

        // Initialize the freelist mutex when no static initializer
        // macro exists for this target.
#ifndef __GTHREAD_MUTEX_INIT
        __GTHREAD_MUTEX_INIT_FUNCTION(&_S_thread_freelist_mutex);
#endif

        // TSD key whose destructor recycles a thread's id on exit.
        __gthread_key_create(&_S_thread_key, _S_destroy_thread_key);

        // Per-bin arrays are indexed by thread id, hence +1 slots
        // (slot 0 is the global list).
        const size_t __max_threads = _S_options._M_max_threads + 1;
        for (size_t __n = 0; __n < _S_bin_size; ++__n)
          {
            _Bin_record& __bin = _S_bin[__n];
            __v = ::operator new(sizeof(_Block_record*) * __max_threads);
            __bin._M_first = static_cast<_Block_record**>(__v);

            __v = ::operator new(sizeof(size_t) * __max_threads);
            __bin._M_free = static_cast<size_t*>(__v);

            __v = ::operator new(sizeof(size_t) * __max_threads);
            __bin._M_used = static_cast<size_t*>(__v);

            __v = ::operator new(sizeof(__gthread_mutex_t));
            __bin._M_mutex = static_cast<__gthread_mutex_t*>(__v);

            // Initialize the bin mutex by copy from the static
            // initializer when available, otherwise by function.
#ifdef __GTHREAD_MUTEX_INIT
            {
              __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
              *__bin._M_mutex = __tmp;
            }
#else
            { __GTHREAD_MUTEX_INIT_FUNCTION(__bin._M_mutex); }
#endif

            // All freelists start empty with zeroed counters.
            for (size_t __threadn = 0; __threadn < __max_threads;
                 ++__threadn)
              {
                __bin._M_first[__threadn] = NULL;
                __bin._M_free[__threadn] = 0;
                __bin._M_used[__threadn] = 0;
              }
          }
      }
    else
#endif
      // Single-threaded: each bin needs only the one global list head.
      for (size_t __n = 0; __n < _S_bin_size; ++__n)
        {
          _Bin_record& __bin = _S_bin[__n];
          __v = ::operator new(sizeof(_Block_record*));
          __bin._M_first = static_cast<_Block_record**>(__v);
          __bin._M_first[0] = NULL;
        }

    _S_init = true;
  }
00624
00625
template<typename _Tp>
  size_t
  __mt_alloc<_Tp>::
  _S_get_thread_id()
  {
#ifdef __GTHREADS
    // With active threads, return this thread's pool id (1-based),
    // assigning one from the freelist on first use and storing it in
    // thread-specific data so the TSD destructor can recycle it.
    if (__gthread_active_p())
      {
        _Thread_record* __freelist_pos =
        static_cast<_Thread_record*>(__gthread_getspecific(_S_thread_key));
        if (__freelist_pos == NULL)
          {
            // First call from this thread: pop an id record from the
            // global freelist.
            // NOTE(review): no NULL check on _S_thread_freelist_first;
            // looks like exceeding _M_max_threads concurrent threads
            // would dereference NULL here — confirm intended limit.
            __gthread_mutex_lock(&_S_thread_freelist_mutex);
            __freelist_pos = _S_thread_freelist_first;
            _S_thread_freelist_first = _S_thread_freelist_first->_M_next;
            __gthread_mutex_unlock(&_S_thread_freelist_mutex);

            __gthread_setspecific(_S_thread_key,
                                  static_cast<void*>(__freelist_pos));
          }
        return __freelist_pos->_M_id;
      }
#endif
    // Single-threaded (or threads inactive): everything uses slot 0.
    return 0;
  }
00659
00660
#ifdef __GTHREADS
template<typename _Tp>
  void
  __mt_alloc<_Tp>::
  _S_destroy_thread_key(void* __freelist_pos)
  {
    // Thread-specific-data destructor: the exiting thread's id record
    // (stored via __gthread_setspecific in _S_get_thread_id) is pushed
    // back onto the global freelist so a later thread can reuse the id.
    _Thread_record* const __returned
      = static_cast<_Thread_record*>(__freelist_pos);
    __gthread_mutex_lock(&_S_thread_freelist_mutex);
    __returned->_M_next = _S_thread_freelist_first;
    _S_thread_freelist_first = __returned;
    __gthread_mutex_unlock(&_S_thread_freelist_mutex);
  }
#endif
00674
00675
template<typename _Tp>
  inline bool
  operator==(const __mt_alloc<_Tp>&, const __mt_alloc<_Tp>&)
  // All __mt_alloc instances share the same static pool state, so any
  // two allocators of the same type compare equal.
  { return true; }
00679
00680
template<typename _Tp>
  inline bool
  operator!=(const __mt_alloc<_Tp>&, const __mt_alloc<_Tp>&)
  // Complement of operator==: stateless allocators are never unequal.
  { return false; }
00684
00685
// Definitions of the static members declared in __mt_alloc.

// Set to true once _S_initialize has run.
template<typename _Tp>
  bool __mt_alloc<_Tp>::_S_init = false;

// Tuning parameters; default-constructed (and re-constructed in
// _S_initialize if found zeroed).
template<typename _Tp>
  typename __mt_alloc<_Tp>::_Tune __mt_alloc<_Tp>::_S_options;

// Size-to-bin lookup table, built in _S_initialize.
template<typename _Tp>
  typename __mt_alloc<_Tp>::_Binmap_type* __mt_alloc<_Tp>::_S_binmap;

// Array of bin records, built in _S_initialize.
template<typename _Tp>
  typename __mt_alloc<_Tp>::_Bin_record* volatile __mt_alloc<_Tp>::_S_bin;

// Number of bins; starts at 1 because the counting loop in
// _S_initialize only increments for each doubling past _M_min_bin.
template<typename _Tp>
  size_t __mt_alloc<_Tp>::_S_bin_size = 1;

#ifdef __GTHREADS
// Guard for one-time initialization under active threads.
template<typename _Tp>
  __gthread_once_t __mt_alloc<_Tp>::_S_once = __GTHREAD_ONCE_INIT;

// Head of the freelist of reusable thread-id records.
template<typename _Tp>
  typename __mt_alloc<_Tp>::_Thread_record*
  volatile __mt_alloc<_Tp>::_S_thread_freelist_first = NULL;

// TSD key mapping each thread to its id record.
template<typename _Tp>
  __gthread_key_t __mt_alloc<_Tp>::_S_thread_key;

// Mutex protecting the thread-id freelist; statically initialized
// where the target provides an initializer macro, otherwise set up by
// __GTHREAD_MUTEX_INIT_FUNCTION in _S_initialize.
template<typename _Tp>
  __gthread_mutex_t
#ifdef __GTHREAD_MUTEX_INIT
  __mt_alloc<_Tp>::_S_thread_freelist_mutex = __GTHREAD_MUTEX_INIT;
#else
  __mt_alloc<_Tp>::_S_thread_freelist_mutex;
#endif
#endif
00720
}
00721
00722
#endif