#ifndef __TBB_enumerable_thread_specific_H
#define __TBB_enumerable_thread_specific_H

#include "concurrent_vector.h"
#include "tbb_thread.h"
#include "tbb_allocator.h"
#include "cache_aligned_allocator.h"
#include "aligned_space.h"
#include <string.h>   // for memcpy and memset

#if _WIN32||_WIN64
#include "machine/windows_api.h"
#else
#include <pthread.h>
#endif

namespace tbb {

//! enum for selecting between single key and key-per-instance versions
enum ets_key_usage_type { ets_key_per_instance, ets_no_key };

namespace interface6 {

namespace internal {

//! Lock-free open-addressed hash table that maps a per-thread key to a pointer to that thread's element.
template<ets_key_usage_type ETS_key_type>
class ets_base: tbb::internal::no_copy {
protected:
#if _WIN32||_WIN64
    typedef DWORD key_type;
#else
    typedef pthread_t key_type;
#endif
#if __TBB_PROTECTED_NESTED_CLASS_BROKEN
public:
#endif
    struct slot;

    struct array {
        array* next;
        size_t lg_size;
        slot& at( size_t k ) {
            // Slots are stored immediately after the array header.
            return ((slot*)(void*)(this+1))[k];
        }
        size_t size() const {return (size_t)1<<lg_size;}
        size_t mask() const {return size()-1;}
        size_t start( size_t h ) const {
            return h>>(8*sizeof(size_t)-lg_size);
        }
    };
    struct slot {
        key_type key;
        void* ptr;
        bool empty() const {return !key;}
        bool match( key_type k ) const {return key==k;}
        bool claim( key_type k ) {
            __TBB_ASSERT(sizeof(tbb::atomic<key_type>)==sizeof(key_type), NULL);
            return tbb::internal::punned_cast<tbb::atomic<key_type>*>(&key)->compare_and_swap(k,0)==0;
        }
    };
#if __TBB_PROTECTED_NESTED_CLASS_BROKEN
protected:
#endif

    static key_type key_of_current_thread() {
        tbb::tbb_thread::id id = tbb::this_tbb_thread::get_id();
        key_type k;
        memcpy( &k, &id, sizeof(k) );
        return k;
    }

    //! Root of linked list of arrays of decreasing size.
    /** NULL if and only if my_count==0.
        Each array in the list is half the size of its successor. */
    atomic<array*> my_root;
    atomic<size_t> my_count;
    virtual void* create_local() = 0;
    virtual void* create_array(size_t _size) = 0;       // _size in bytes
    virtual void free_array(void* ptr, size_t _size) = 0; // _size in bytes
    array* allocate( size_t lg_size ) {
        size_t n = 1<<lg_size;
        array* a = static_cast<array*>(create_array( sizeof(array)+n*sizeof(slot) ));
        a->lg_size = lg_size;
        std::memset( a+1, 0, n*sizeof(slot) );
        return a;
    }
    void free(array* a) {
        size_t n = 1<<(a->lg_size);
        free_array( (void *)a, size_t(sizeof(array)+n*sizeof(slot)) );
    }
    static size_t hash( key_type k ) {
        // Multiplicative hashing; clients use the *upper* bits (see array::start).
        return uintptr_t(k)*tbb::internal::size_t_select(0x9E3779B9,0x9E3779B97F4A7C15ULL);
    }

    ets_base() {my_root=NULL; my_count=0;}
    virtual ~ets_base();    // defined out of line below
    void* table_lookup( bool& exists );
    void table_clear();
    slot& table_find( key_type k ) {
        size_t h = hash(k);
        array* r = my_root;
        size_t mask = r->mask();
        for(size_t i = r->start(h);;i=(i+1)&mask) {
            slot& s = r->at(i);
            if( s.empty() || s.match(k) )
                return s;
        }
    }
    void table_reserve_for_copy( const ets_base& other ) {
        __TBB_ASSERT(!my_root,NULL);
        __TBB_ASSERT(!my_count,NULL);
        if( other.my_root ) {
            array* a = allocate(other.my_root->lg_size);
            a->next = NULL;
            my_root = a;
            my_count = other.my_count;
        }
    }
};
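
// How a concrete container plugs into ets_base (an illustrative sketch, not a class in this
// header; the names are hypothetical). A derived class supplies storage through the three
// pure virtuals and funnels every "give me this thread's element" request through
// table_lookup():
//
//     class my_ets : internal::ets_base<ets_no_key> {
//         /*override*/ void* create_local()              { /* allocate and construct one element */ }
//         /*override*/ void* create_array(size_t bytes)  { /* allocate raw storage for a slot array */ }
//         /*override*/ void  free_array(void* p, size_t) { /* release a slot array */ }
//     public:
//         void* local() { bool exists; return this->table_lookup(exists); }
//     };
//
// enumerable_thread_specific below follows exactly this pattern, backing create_local with a
// tbb::concurrent_vector of padded elements.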

template<ets_key_usage_type ETS_key_type>
ets_base<ETS_key_type>::~ets_base() {
    __TBB_ASSERT(!my_root, NULL);
}

template<ets_key_usage_type ETS_key_type>
void ets_base<ETS_key_type>::table_clear() {
    while( array* r = my_root ) {
        my_root = r->next;
        free(r);
    }
    my_count = 0;
}

template<ets_key_usage_type ETS_key_type>
void* ets_base<ETS_key_type>::table_lookup( bool& exists ) {
    const key_type k = key_of_current_thread();

    __TBB_ASSERT(k!=0,NULL);
    void* found;
    size_t h = hash(k);
    for( array* r=my_root; r; r=r->next ) {
        size_t mask=r->mask();
        for(size_t i = r->start(h); ;i=(i+1)&mask) {
            slot& s = r->at(i);
            if( s.empty() ) break;
            if( s.match(k) ) {
                if( r==my_root ) {
                    // Success at the top level: the element is already in the newest array.
                    exists = true;
                    return s.ptr;
                } else {
                    // Found in an older array: promote the entry into the newest array.
                    exists = true;
                    found = s.ptr;
                    goto insert;
                }
            }
        }
    }
    // Key not found: create a new element for this thread.
    exists = false;
    found = create_local();
    {
        size_t c = ++my_count;
        array* r = my_root;
        if( !r || c>r->size()/2 ) {
            // Table is absent or more than half full: prepend a bigger array.
            size_t s = r ? r->lg_size : 2;
            while( c>size_t(1)<<(s-1) ) ++s;
            array* a = allocate(s);
            for(;;) {
                a->next = my_root;
                array* new_r = my_root.compare_and_swap(a,r);
                if( new_r==r ) break;
                if( new_r->lg_size>=s ) {
                    // Another thread installed an equal or bigger array, so our array is superfluous.
                    free(a);
                    break;
                }
                r = new_r;
            }
        }
    }
insert:
    // There is guaranteed to be room, and the key is not present, so search for an empty slot and claim it.
    array* ir = my_root;
    size_t mask = ir->mask();
    for(size_t i = ir->start(h);;i=(i+1)&mask) {
        slot& s = ir->at(i);
        if( s.empty() ) {
            if( s.claim(k) ) {
                s.ptr = found;
                return found;
            }
        }
    }
}
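
// Notes on table_lookup (editorial summary of the code above, not additional behaviour):
//  - each array is an open-addressed table probed linearly from array::start(h), which takes
//    the *upper* bits of the multiplicative hash;
//  - growth never rehashes in place: when the newest array is more than half full, a larger
//    array is pushed onto the head of the list, and entries found in older arrays are
//    re-inserted into the newest array on their next lookup;
//  - slot::claim() uses compare_and_swap, so first-time lookups from different threads can
//    insert concurrently without locks.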

//! Specialization that exploits native TLS
template <>
class ets_base<ets_key_per_instance>: protected ets_base<ets_no_key> {
    typedef ets_base<ets_no_key> super;
#if _WIN32||_WIN64
#if __TBB_WIN8UI_SUPPORT
    typedef DWORD tls_key_t;
    void create_key() { my_key = FlsAlloc(NULL); }
    void destroy_key() { FlsFree(my_key); }
    void set_tls(void * value) { FlsSetValue(my_key, (LPVOID)value); }
    void* get_tls() { return (void *)FlsGetValue(my_key); }
#else
    typedef DWORD tls_key_t;
    void create_key() { my_key = TlsAlloc(); }
    void destroy_key() { TlsFree(my_key); }
    void set_tls(void * value) { TlsSetValue(my_key, (LPVOID)value); }
    void* get_tls() { return (void *)TlsGetValue(my_key); }
#endif
#else
    typedef pthread_key_t tls_key_t;
    void create_key() { pthread_key_create(&my_key, NULL); }
    void destroy_key() { pthread_key_delete(my_key); }
    void set_tls( void * value ) const { pthread_setspecific(my_key, value); }
    void* get_tls() const { return pthread_getspecific(my_key); }
#endif
    tls_key_t my_key;
    virtual void* create_local() = 0;
    virtual void* create_array(size_t _size) = 0;       // _size in bytes
    virtual void free_array(void* ptr, size_t _size) = 0; // _size in bytes
public:
    ets_base() {create_key();}
    ~ets_base() {destroy_key();}
    void* table_lookup( bool& exists ) {
        void* found = get_tls();
        if( found ) {
            exists=true;
        } else {
            found = super::table_lookup(exists);
            set_tls(found);
        }
        return found;
    }
    void table_clear() {
        destroy_key();
        create_key();
        super::table_clear();
    }
};
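
// Editorial note: the specialization above layers one native TLS slot per container instance
// over the shared hash table, purely as a cache. The first lookup on a thread falls through
// to super::table_lookup() and records the result with set_tls(); subsequent lookups on that
// thread are a single get_tls() call. table_clear() recycles the key so a stale cached
// pointer cannot survive clear().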

//! Random-access iterator over the thread-local elements stored in the internal container.
template< typename Container, typename Value >
class enumerable_thread_specific_iterator
#if defined(_WIN64) && defined(_MSC_VER)
    // Deriving from std::iterator supplies iterator traits for the MSVC x64 STL.
    : public std::iterator<std::random_access_iterator_tag,Value>
#endif
{
    // Underlying container, index of the current element, and a cached pointer to its value.
    Container *my_container;
    typename Container::size_type my_index;
    mutable Value *my_value;

    template<typename C, typename T>
    friend enumerable_thread_specific_iterator<C,T> operator+( ptrdiff_t offset,
                                                               const enumerable_thread_specific_iterator<C,T>& v );

    template<typename C, typename T, typename U>
    friend bool operator==( const enumerable_thread_specific_iterator<C,T>& i,
                            const enumerable_thread_specific_iterator<C,U>& j );

    template<typename C, typename T, typename U>
    friend bool operator<( const enumerable_thread_specific_iterator<C,T>& i,
                           const enumerable_thread_specific_iterator<C,U>& j );

    template<typename C, typename T, typename U>
    friend ptrdiff_t operator-( const enumerable_thread_specific_iterator<C,T>& i,
                                const enumerable_thread_specific_iterator<C,U>& j );

    template<typename C, typename U>
    friend class enumerable_thread_specific_iterator;

public:

    enumerable_thread_specific_iterator( const Container &container, typename Container::size_type index ) :
        my_container(&const_cast<Container &>(container)), my_index(index), my_value(NULL) {}

    //! Default constructor
    enumerable_thread_specific_iterator() : my_container(NULL), my_index(0), my_value(NULL) {}

    template<typename U>
    enumerable_thread_specific_iterator( const enumerable_thread_specific_iterator<Container, U>& other ) :
        my_container( other.my_container ), my_index( other.my_index ), my_value( const_cast<Value *>(other.my_value) ) {}

    enumerable_thread_specific_iterator operator+( ptrdiff_t offset ) const {
        return enumerable_thread_specific_iterator(*my_container, my_index + offset);
    }

    enumerable_thread_specific_iterator &operator+=( ptrdiff_t offset ) {
        my_index += offset;
        my_value = NULL;
        return *this;
    }

    enumerable_thread_specific_iterator operator-( ptrdiff_t offset ) const {
        return enumerable_thread_specific_iterator( *my_container, my_index-offset );
    }

    enumerable_thread_specific_iterator &operator-=( ptrdiff_t offset ) {
        my_index -= offset;
        my_value = NULL;
        return *this;
    }

    Value& operator*() const {
        Value* value = my_value;
        if( !value ) {
            value = my_value = reinterpret_cast<Value *>(&(*my_container)[my_index].value);
        }
        __TBB_ASSERT( value==reinterpret_cast<Value *>(&(*my_container)[my_index].value), "corrupt cache" );
        return *value;
    }

    Value& operator[]( ptrdiff_t k ) const {
        // The element stores its payload in a raw character buffer, so cast as operator*() does.
        return *reinterpret_cast<Value *>(&(*my_container)[my_index + k].value);
    }

    Value* operator->() const {return &operator*();}

    enumerable_thread_specific_iterator& operator++() {
        ++my_index;
        my_value = NULL;
        return *this;
    }

    enumerable_thread_specific_iterator& operator--() {
        --my_index;
        my_value = NULL;
        return *this;
    }

    //! Post increment
    enumerable_thread_specific_iterator operator++(int) {
        enumerable_thread_specific_iterator result = *this;
        ++my_index;
        my_value = NULL;
        return result;
    }

    //! Post decrement
    enumerable_thread_specific_iterator operator--(int) {
        enumerable_thread_specific_iterator result = *this;
        --my_index;
        my_value = NULL;
        return result;
    }

    // STL support
    typedef ptrdiff_t difference_type;
    typedef Value value_type;
    typedef Value* pointer;
    typedef Value& reference;
    typedef std::random_access_iterator_tag iterator_category;
};

template<typename Container, typename T>
enumerable_thread_specific_iterator<Container,T> operator+( ptrdiff_t offset,
                                                            const enumerable_thread_specific_iterator<Container,T>& v ) {
    // Dereference my_container: the constructor takes a reference, not a pointer.
    return enumerable_thread_specific_iterator<Container,T>( *v.my_container, v.my_index + offset );
}

template<typename Container, typename T, typename U>
bool operator==( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return i.my_index==j.my_index && i.my_container == j.my_container;
}

template<typename Container, typename T, typename U>
bool operator!=( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return !(i==j);
}

template<typename Container, typename T, typename U>
bool operator<( const enumerable_thread_specific_iterator<Container,T>& i,
                const enumerable_thread_specific_iterator<Container,U>& j ) {
    return i.my_index<j.my_index;
}

template<typename Container, typename T, typename U>
bool operator>( const enumerable_thread_specific_iterator<Container,T>& i,
                const enumerable_thread_specific_iterator<Container,U>& j ) {
    return j<i;
}

template<typename Container, typename T, typename U>
bool operator>=( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return !(i<j);
}

template<typename Container, typename T, typename U>
bool operator<=( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return !(j<i);
}

template<typename Container, typename T, typename U>
ptrdiff_t operator-( const enumerable_thread_specific_iterator<Container,T>& i,
                     const enumerable_thread_specific_iterator<Container,U>& j ) {
    return i.my_index-j.my_index;
}

//! Input iterator that walks the elements of a container of containers as one flat sequence.
template<typename SegmentedContainer, typename Value >
class segmented_iterator
#if defined(_WIN64) && defined(_MSC_VER)
    // Deriving from std::iterator supplies iterator traits for the MSVC x64 STL.
    : public std::iterator<std::input_iterator_tag, Value>
#endif
{
    template<typename C, typename T, typename U>
    friend bool operator==(const segmented_iterator<C,T>& i, const segmented_iterator<C,U>& j);

    template<typename C, typename T, typename U>
    friend bool operator!=(const segmented_iterator<C,T>& i, const segmented_iterator<C,U>& j);

    template<typename C, typename U>
    friend class segmented_iterator;

public:

    segmented_iterator() {my_segcont = NULL;}

    segmented_iterator( const SegmentedContainer& _segmented_container ) :
        my_segcont(const_cast<SegmentedContainer*>(&_segmented_container)),
        outer_iter(my_segcont->end()) { }

    ~segmented_iterator() {}

    typedef typename SegmentedContainer::iterator outer_iterator;
    typedef typename SegmentedContainer::value_type InnerContainer;
    typedef typename InnerContainer::iterator inner_iterator;

    // STL support
    typedef ptrdiff_t difference_type;
    typedef Value value_type;
    typedef typename SegmentedContainer::size_type size_type;
    typedef Value* pointer;
    typedef Value& reference;
    typedef std::input_iterator_tag iterator_category;

    // Copy constructor
    template<typename U>
    segmented_iterator(const segmented_iterator<SegmentedContainer, U>& other) :
        my_segcont(other.my_segcont),
        outer_iter(other.outer_iter),
        // note: inner_iter is copied even when outer_iter is at end().
        inner_iter(other.inner_iter)
    {}

    template<typename U>
    segmented_iterator& operator=( const segmented_iterator<SegmentedContainer, U>& other) {
        // Compare addresses as void* so that mixed-type assignment (e.g. iterator to const_iterator) compiles.
        if(static_cast<const void *>(this) != static_cast<const void *>(&other)) {
            my_segcont = other.my_segcont;
            outer_iter = other.outer_iter;
            if(outer_iter != my_segcont->end()) inner_iter = other.inner_iter;
        }
        return *this;
    }

    // Set the position from an outer iterator: advance to the first non-empty
    // inner container at or after new_outer_iter.
    segmented_iterator& operator=(const outer_iterator& new_outer_iter) {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        // Move the outer iterator to an allowed position: one with a non-empty inner container, or end().
        for(outer_iter = new_outer_iter; outer_iter!=my_segcont->end(); ++outer_iter) {
            if( !outer_iter->empty() ) {
                inner_iter = outer_iter->begin();
                break;
            }
        }
        return *this;
    }

    // Pre-increment
    segmented_iterator& operator++() {
        advance_me();
        return *this;
    }

    // Post-increment
    segmented_iterator operator++(int) {
        segmented_iterator tmp = *this;
        operator++();
        return tmp;
    }

    bool operator==(const outer_iterator& other_outer) const {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        return (outer_iter == other_outer &&
                (outer_iter == my_segcont->end() || inner_iter == outer_iter->begin()));
    }

    bool operator!=(const outer_iterator& other_outer) const {
        return !operator==(other_outer);
    }

    // Dereference
    reference operator*() const {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        __TBB_ASSERT(outer_iter != my_segcont->end(), "Dereferencing a pointer at end of container");
        __TBB_ASSERT(inner_iter != outer_iter->end(), NULL);
        return *inner_iter;
    }

    pointer operator->() const { return &operator*();}

private:
    SegmentedContainer* my_segcont;
    outer_iterator outer_iter;
    inner_iterator inner_iter;

    void advance_me() {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        __TBB_ASSERT(outer_iter != my_segcont->end(), NULL);
        __TBB_ASSERT(inner_iter != outer_iter->end(), NULL);
        ++inner_iter;
        // Skip empty inner containers until a non-empty one or the outer end() is reached.
        while(inner_iter == outer_iter->end() && ++outer_iter != my_segcont->end()) {
            inner_iter = outer_iter->begin();
        }
    }
};
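
// Illustrative sketch of how this iterator is driven (normally reached through the
// flattened2d view further below; `locals` and `process` are hypothetical):
//
//     typedef tbb::enumerable_thread_specific< std::vector<int> > Ets;  // container of containers
//     Ets locals;
//     internal::segmented_iterator<Ets, int> it(locals);
//     for( it = locals.begin(); it != locals.end(); ++it )
//         process(*it);                  // visits every int of every thread-local vector
//
// Assigning an outer iterator positions the segmented_iterator at the first element of the
// first non-empty inner container; operator++ then crosses inner-container boundaries
// automatically.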

template<typename SegmentedContainer, typename T, typename U>
bool operator==( const segmented_iterator<SegmentedContainer,T>& i,
                 const segmented_iterator<SegmentedContainer,U>& j ) {
    if(i.my_segcont != j.my_segcont) return false;
    if(i.my_segcont == NULL) return true;
    if(i.outer_iter != j.outer_iter) return false;
    if(i.outer_iter == i.my_segcont->end()) return true;
    return i.inner_iter == j.inner_iter;
}

template<typename SegmentedContainer, typename T, typename U>
bool operator!=( const segmented_iterator<SegmentedContainer,T>& i,
                 const segmented_iterator<SegmentedContainer,U>& j ) {
    return !(i==j);
}

// Holds storage for a T and runs only its destructor; construction is done externally.
template<typename T>
struct destruct_only: tbb::internal::no_copy {
    tbb::aligned_space<T,1> value;
    ~destruct_only() {value.begin()[0].~T();}
};

// Constructs a T by default construction.
template<typename T>
struct construct_by_default: tbb::internal::no_assign {
    void construct(void*where) {new(where) T();}
    construct_by_default( int ) {}
};

// Constructs a T as a copy of an exemplar value.
template<typename T>
struct construct_by_exemplar: tbb::internal::no_assign {
    const T exemplar;
    void construct(void*where) {new(where) T(exemplar);}
    construct_by_exemplar( const T& t ) : exemplar(t) {}
};

// Constructs a T from the result of calling a finit functor.
template<typename T, typename Finit>
struct construct_by_finit: tbb::internal::no_assign {
    Finit f;
    void construct(void* where) {new(where) T(f());}
    construct_by_finit( const Finit& f_ ) : f(f_) {}
};

// Type-erased "how to build a new thread-local T" callback.
template<typename T>
class callback_base {
public:
    // Clone *this
    virtual callback_base* clone() = 0;
    // Destruct and free *this
    virtual void destroy() = 0;
    // Virtual destructor for the polymorphic base
    virtual ~callback_base() { }
    // Construct a T at where
    virtual void construct(void* where) = 0;
};

template <typename T, typename Constructor>
class callback_leaf: public callback_base<T>, Constructor {
    template<typename X> callback_leaf( const X& x ) : Constructor(x) {}

    typedef typename tbb::tbb_allocator<callback_leaf> my_allocator_type;

    /*override*/ callback_base<T>* clone() {
        void* where = my_allocator_type().allocate(1);
        return new(where) callback_leaf(*this);
    }

    /*override*/ void destroy() {
        my_allocator_type().destroy(this);
        my_allocator_type().deallocate(this,1);
    }

    /*override*/ void construct(void* where) {
        Constructor::construct(where);
    }
public:
    template<typename X>
    static callback_base<T>* make( const X& x ) {
        void* where = my_allocator_type().allocate(1);
        return new(where) callback_leaf(x);
    }
};
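
// Editorial note: callback_leaf objects are allocated outside the container that owns them.
// They are created by make() via tbb::tbb_allocator, duplicated by clone() when a container
// is copied (see internal_copy below), and released by destroy(); they are never deleted
// directly.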

//! Template for adding padding in order to avoid false sharing
/** ModularSize should be sizeof(U) modulo the cache line size.
    Construction and destruction of the contained U are performed explicitly
    (see create_local and unconstruct), never by this struct itself. */
template<typename U, size_t ModularSize>
struct ets_element {
    char value[ModularSize==0 ? sizeof(U) : sizeof(U)+(tbb::internal::NFS_MaxLineSize-ModularSize)];
    void unconstruct() {
        tbb::internal::punned_cast<U*>(&value)->~U();
    }
};

} // namespace internal


//! The enumerable_thread_specific container
/** Lazily creates one element of type T per thread that accesses it.
    - elements are created on first access: default-constructed, copied from an exemplar,
      or produced by a user-supplied finit functor;
    - an element never moves, so the address returned by local() stays valid until clear()
      or destruction;
    - containers may be copy-constructed and assigned;
    - with ets_key_per_instance the lookup is cached in native TLS for speed;
    - outside of parallel use, all elements can be visited via iterators, range(),
      combine() or combine_each(). */
template <typename T,
          typename Allocator=cache_aligned_allocator<T>,
          ets_key_usage_type ETS_key_type=ets_no_key >
class enumerable_thread_specific: internal::ets_base<ETS_key_type> {

    template<typename U, typename A, ets_key_usage_type C> friend class enumerable_thread_specific;

    typedef internal::ets_element<T,sizeof(T)%tbb::internal::NFS_MaxLineSize> padded_element;

    //! A generic range, used to create range objects from the iterators
    template<typename I>
    class generic_range_type: public blocked_range<I> {
    public:
        typedef T value_type;
        typedef T& reference;
        typedef const T& const_reference;
        typedef I iterator;
        typedef ptrdiff_t difference_type;
        generic_range_type( I begin_, I end_, size_t grainsize_ = 1) : blocked_range<I>(begin_,end_,grainsize_) {}
        template<typename U>
        generic_range_type( const generic_range_type<U>& r) : blocked_range<I>(r.begin(),r.end(),r.grainsize()) {}
        generic_range_type( generic_range_type& r, split ) : blocked_range<I>(r,split()) {}
    };

    typedef typename Allocator::template rebind< padded_element >::other padded_allocator_type;
    typedef tbb::concurrent_vector< padded_element, padded_allocator_type > internal_collection_type;

    internal::callback_base<T> *my_construct_callback;

    internal_collection_type my_locals;

    /*override*/ void* create_local() {
#if TBB_DEPRECATED
        void* lref = &my_locals[my_locals.push_back(padded_element())];
#else
        void* lref = &*my_locals.push_back(padded_element());
#endif
        my_construct_callback->construct(lref);
        return lref;
    }

    void unconstruct_locals() {
        for(typename internal_collection_type::iterator cvi = my_locals.begin(); cvi != my_locals.end(); ++cvi) {
            cvi->unconstruct();
        }
    }

    typedef typename Allocator::template rebind< uintptr_t >::other array_allocator_type;

    // _size is in bytes
    /*override*/ void* create_array(size_t _size) {
        size_t nelements = (_size + sizeof(uintptr_t) -1) / sizeof(uintptr_t);
        return array_allocator_type().allocate(nelements);
    }

    /*override*/ void free_array( void* _ptr, size_t _size) {
        size_t nelements = (_size + sizeof(uintptr_t) -1) / sizeof(uintptr_t);
        array_allocator_type().deallocate( reinterpret_cast<uintptr_t *>(_ptr),nelements);
    }

public:

    //! Basic types
    typedef Allocator allocator_type;
    typedef T value_type;
    typedef T& reference;
    typedef const T& const_reference;
    typedef T* pointer;
    typedef const T* const_pointer;
    typedef typename internal_collection_type::size_type size_type;
    typedef typename internal_collection_type::difference_type difference_type;

    // Iterator types
    typedef typename internal::enumerable_thread_specific_iterator< internal_collection_type, value_type > iterator;
    typedef typename internal::enumerable_thread_specific_iterator< internal_collection_type, const value_type > const_iterator;

    // Parallel range types
    typedef generic_range_type< iterator > range_type;
    typedef generic_range_type< const_iterator > const_range_type;

    //! Default constructor.  Each local instance of T is default constructed.
    enumerable_thread_specific() :
        my_construct_callback( internal::callback_leaf<T,internal::construct_by_default<T> >::make(0) )
    {}

    //! Constructor with initializer functor.  Each local instance of T is constructed by T(finit()).
    template <typename Finit>
    enumerable_thread_specific( Finit finit ) :
        my_construct_callback( internal::callback_leaf<T,internal::construct_by_finit<T,Finit> >::make( finit ) )
    {}

    //! Constructor with exemplar.  Each local instance of T is copy-constructed from the exemplar.
    enumerable_thread_specific(const T& exemplar) :
        my_construct_callback( internal::callback_leaf<T,internal::construct_by_exemplar<T> >::make( exemplar ) )
    {}

    //! Destructor
    ~enumerable_thread_specific() {
        my_construct_callback->destroy();
        this->clear();  // deallocates the hash table while the virtual free_array is still available
    }

    //! Returns reference to calling thread's local copy, discarding the exists flag
    reference local() {
        bool exists;
        return local(exists);
    }

    //! Returns reference to calling thread's local copy, creating one if necessary
    reference local(bool& exists) {
        void* ptr = this->table_lookup(exists);
        return *(T*)ptr;
    }
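
    // Typical use of local() (illustrative sketch; my_ets and the surrounding code are
    // hypothetical):
    //
    //     static tbb::enumerable_thread_specific<int> my_ets;
    //     ...
    //     int& counter = my_ets.local();   // first call on a thread lazily creates the element
    //     ++counter;                       // later calls on the same thread return the same object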

    //! Get the number of local copies
    size_type size() const { return my_locals.size(); }

    //! True if there have been no local copies created
    bool empty() const { return my_locals.empty(); }

    //! begin iterator
    iterator begin() { return iterator( my_locals, 0 ); }
    //! end iterator
    iterator end() { return iterator(my_locals, my_locals.size() ); }

    //! begin const iterator
    const_iterator begin() const { return const_iterator(my_locals, 0); }

    //! end const iterator
    const_iterator end() const { return const_iterator(my_locals, my_locals.size()); }

    //! Get range for parallel algorithms
    range_type range( size_t grainsize=1 ) { return range_type( begin(), end(), grainsize ); }

    //! Get const range for parallel algorithms
    const_range_type range( size_t grainsize=1 ) const { return const_range_type( begin(), end(), grainsize ); }

    //! Destroys local copies
    void clear() {
        unconstruct_locals();
        my_locals.clear();
        this->table_clear();
        // the construction callback and exemplar are not destroyed
    }

private:

    template<typename U, typename A2, ets_key_usage_type C2>
    void internal_copy( const enumerable_thread_specific<U, A2, C2>& other);

public:

    template<typename U, typename Alloc, ets_key_usage_type Cachetype>
    enumerable_thread_specific( const enumerable_thread_specific<U, Alloc, Cachetype>& other ) : internal::ets_base<ETS_key_type> ()
    {
        internal_copy(other);
    }

    enumerable_thread_specific( const enumerable_thread_specific& other ) : internal::ets_base<ETS_key_type> ()
    {
        internal_copy(other);
    }

private:

    template<typename U, typename A2, ets_key_usage_type C2>
    enumerable_thread_specific &
    internal_assign(const enumerable_thread_specific<U, A2, C2>& other) {
        if(static_cast<void *>( this ) != static_cast<const void *>( &other )) {
            this->clear();
            my_construct_callback->destroy();
            my_construct_callback = 0;
            internal_copy( other );
        }
        return *this;
    }

public:

    // Assignment
    enumerable_thread_specific& operator=(const enumerable_thread_specific& other) {
        return internal_assign(other);
    }

    template<typename U, typename Alloc, ets_key_usage_type Cachetype>
    enumerable_thread_specific& operator=(const enumerable_thread_specific<U, Alloc, Cachetype>& other)
    {
        return internal_assign(other);
    }

    //! Combine the local copies using a binary functor with signature T(T,T) or T(const T&,const T&).
    //! If there are no local copies, a freshly constructed value is returned.
    template <typename combine_func_t>
    T combine(combine_func_t f_combine) {
        if(begin() == end()) {
            internal::destruct_only<T> location;
            my_construct_callback->construct(location.value.begin());
            return *location.value.begin();
        }
        const_iterator ci = begin();
        T my_result = *ci;
        while(++ci != end())
            my_result = f_combine( my_result, *ci );
        return my_result;
    }

    //! Apply a unary functor with signature void(T) or void(const T&) to each local copy.
    template <typename combine_func_t>
    void combine_each(combine_func_t f_combine) {
        for(const_iterator ci = begin(); ci != end(); ++ci) {
            f_combine( *ci );
        }
    }

};
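
// Usage sketch for the container as a whole (illustrative only; assumes <functional> for
// std::plus and tbb/parallel_for.h plus C++11 lambdas, none of which this header provides):
//
//     tbb::enumerable_thread_specific<int> partial_sums(0);    // exemplar: each copy starts at 0
//
//     tbb::parallel_for( 0, n, [&]( int i ) {
//         partial_sums.local() += data[i];                     // no locking: one copy per thread
//     } );
//
//     int total = partial_sums.combine( std::plus<int>() );    // reduce the per-thread copies
//     partial_sums.clear();                                    // destroy all thread-local copies
//
// Passing a callable instead of an exemplar (the finit constructor) makes every new copy
// T(finit()); the default constructor default-constructs each copy.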

template <typename T, typename Allocator, ets_key_usage_type ETS_key_type>
template<typename U, typename A2, ets_key_usage_type C2>
void enumerable_thread_specific<T,Allocator,ETS_key_type>::internal_copy( const enumerable_thread_specific<U, A2, C2>& other) {
    // Initialize my_construct_callback first, so that it is valid even if the rest of this routine throws an exception.
    my_construct_callback = other.my_construct_callback->clone();

    typedef internal::ets_base<ets_no_key> base;
    __TBB_ASSERT(my_locals.size()==0,NULL);
    this->table_reserve_for_copy( other );
    for( base::array* r=other.my_root; r; r=r->next ) {
        for( size_t i=0; i<r->size(); ++i ) {
            base::slot& s1 = r->at(i);
            if( !s1.empty() ) {
                base::slot& s2 = this->table_find(s1.key);
                if( s2.empty() ) {
#if TBB_DEPRECATED
                    void* lref = &my_locals[my_locals.push_back(padded_element())];
#else
                    void* lref = &*my_locals.push_back(padded_element());
#endif
                    s2.ptr = new(lref) T(*(U*)s1.ptr);
                    s2.key = s1.key;
                } else {
                    // Skip the duplicate: an entry for this key was already copied from a newer array.
                }
            }
        }
    }
}

//! A view that presents a container of containers as a single flat sequence.
template< typename Container >
class flattened2d {

    // The type of the inner containers.
    typedef typename Container::value_type conval_type;

public:

    //! Basic types
    typedef typename conval_type::size_type size_type;
    typedef typename conval_type::difference_type difference_type;
    typedef typename conval_type::allocator_type allocator_type;
    typedef typename conval_type::value_type value_type;
    typedef typename conval_type::reference reference;
    typedef typename conval_type::const_reference const_reference;
    typedef typename conval_type::pointer pointer;
    typedef typename conval_type::const_pointer const_pointer;

    typedef typename internal::segmented_iterator<Container, value_type> iterator;
    typedef typename internal::segmented_iterator<Container, const value_type> const_iterator;

    flattened2d( const Container &c, typename Container::const_iterator b, typename Container::const_iterator e ) :
        my_container(const_cast<Container*>(&c)), my_begin(b), my_end(e) { }

    flattened2d( const Container &c ) :
        my_container(const_cast<Container*>(&c)), my_begin(c.begin()), my_end(c.end()) { }

    iterator begin() { return iterator(*my_container) = my_begin; }
    iterator end() { return iterator(*my_container) = my_end; }
    const_iterator begin() const { return const_iterator(*my_container) = my_begin; }
    const_iterator end() const { return const_iterator(*my_container) = my_end; }

    size_type size() const {
        size_type tot_size = 0;
        for(typename Container::const_iterator i = my_begin; i != my_end; ++i) {
            tot_size += i->size();
        }
        return tot_size;
    }

private:

    Container *my_container;
    typename Container::const_iterator my_begin;
    typename Container::const_iterator my_end;

};

//! Returns a flattened2d view over the sub-range [b,e) of the container of containers c.
template <typename Container>
flattened2d<Container> flatten2d(const Container &c, const typename Container::const_iterator b, const typename Container::const_iterator e) {
    return flattened2d<Container>(c, b, e);
}

//! Returns a flattened2d view over the whole container of containers c.
template <typename Container>
flattened2d<Container> flatten2d(const Container &c) {
    return flattened2d<Container>(c);
}
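
// Usage sketch for flatten2d (illustrative only; the names are hypothetical):
//
//     typedef tbb::enumerable_thread_specific< std::vector<int> > VectorEts;
//     VectorEts hits;
//     // ... threads call hits.local().push_back(value) ...
//
//     tbb::flattened2d<VectorEts> flat = tbb::flatten2d(hits);
//     size_t total = flat.size();                    // sum of the sizes of all local vectors
//     for( tbb::flattened2d<VectorEts>::iterator i = flat.begin(); i != flat.end(); ++i )
//         consume(*i);                               // visits every element of every local vector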

} // namespace interface6

namespace internal {
    using interface6::internal::segmented_iterator;
}

using interface6::enumerable_thread_specific;
using interface6::flattened2d;
using interface6::flatten2d;

} // namespace tbb

#endif /* __TBB_enumerable_thread_specific_H */