File indexing completed on 2025-01-18 10:12:46
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020 #ifndef __TBB__concurrent_unordered_impl_H
0021 #define __TBB__concurrent_unordered_impl_H
0022 #if !defined(__TBB_concurrent_unordered_map_H) && !defined(__TBB_concurrent_unordered_set_H) && !defined(__TBB_concurrent_hash_map_H)
0023 #error Do not #include this internal file directly; use public TBB headers instead.
0024 #endif
0025
0026 #include "../tbb_stddef.h"
0027
0028 #include <iterator>
0029 #include <utility> // Need std::pair
0030 #include <functional> // Need std::equal_to (in ../concurrent_unordered_*.h)
0031 #include <string> // For tbb_hasher
0032 #include <cstring> // Need std::memset
0033 #include __TBB_STD_SWAP_HEADER
0034
0035 #include "../atomic.h"
0036 #include "../tbb_exception.h"
0037 #include "../tbb_allocator.h"
0038
0039 #if __TBB_INITIALIZER_LISTS_PRESENT
0040 #include <initializer_list>
0041 #endif
0042
0043 #if __TBB_CPP11_RVALUE_REF_PRESENT && !__TBB_IMPLICIT_COPY_DELETION_BROKEN
0044 #define __TBB_UNORDERED_NODE_HANDLE_PRESENT 1
0045 #endif
0046
0047 #include "_allocator_traits.h"
0048 #include "_tbb_hash_compare_impl.h"
0049 #include "_template_helpers.h"
0050
0051 #if __TBB_UNORDERED_NODE_HANDLE_PRESENT
0052 #include "_node_handle_impl.h"
0053 #endif
0054
0055 namespace tbb {
0056 namespace interface5 {
0057
0058 namespace internal {
0059
// Forward declarations: the split-ordered list is the node storage engine,
// and concurrent_unordered_base is the hash-table layer built on top of it.
// Both are named as friends by the iterator classes below.
template <typename T, typename Allocator>
class split_ordered_list;
template <typename Traits>
class concurrent_unordered_base;
0064
0065
// Raw forward iterator over every node of a split-ordered list, including
// the dummy (bucket) nodes.  Value is either the element type or the const
// element type, yielding the mutable and const iterator flavors.
//
// NOTE: previously derived from std::iterator, which is deprecated since
// C++17 (and removed in C++20); the iterator traits are now provided as
// explicit member typedefs with identical meaning.
template<class Solist, typename Value>
class flist_iterator
{
    template <typename T, typename Allocator>
    friend class split_ordered_list;
    template <typename Traits>
    friend class concurrent_unordered_base;
    template<class M, typename V>
    friend class flist_iterator;

    typedef typename Solist::nodeptr_t nodeptr_t;
public:
    // Iterator traits (replacement for the deprecated std::iterator base)
    typedef std::forward_iterator_tag iterator_category;
    typedef typename Solist::value_type value_type;
    typedef typename Solist::difference_type difference_type;
    typedef typename Solist::pointer pointer;
    typedef typename Solist::reference reference;

    flist_iterator() : my_node_ptr(0) {}
    // Converting copy: allows a const iterator to be built from a mutable one
    flist_iterator( const flist_iterator<Solist, typename Solist::value_type> &other )
        : my_node_ptr(other.my_node_ptr) {}

    flist_iterator& operator=( const flist_iterator<Solist, typename Solist::value_type> &other ) {
        my_node_ptr = other.my_node_ptr;
        return *this;
    }

    reference operator*() const { return my_node_ptr->my_element; }
    pointer operator->() const { return &**this; }

    // Pre-increment: step to the next node; dummy nodes are NOT skipped here
    flist_iterator& operator++() {
        my_node_ptr = my_node_ptr->my_next;
        return *this;
    }

    flist_iterator operator++(int) {
        flist_iterator tmp = *this;
        ++*this;
        return tmp;
    }

protected:
    // Only the containers (friends) may build an iterator from a raw node
    flist_iterator(nodeptr_t pnode) : my_node_ptr(pnode) {}
    nodeptr_t get_node_ptr() const { return my_node_ptr; }

    nodeptr_t my_node_ptr;

    template<typename M, typename T, typename U>
    friend bool operator==( const flist_iterator<M,T> &i, const flist_iterator<M,U> &j );
    template<typename M, typename T, typename U>
    friend bool operator!=( const flist_iterator<M,T>& i, const flist_iterator<M,U>& j );
};
0117
0118 template<typename Solist, typename T, typename U>
0119 bool operator==( const flist_iterator<Solist,T> &i, const flist_iterator<Solist,U> &j ) {
0120 return i.my_node_ptr == j.my_node_ptr;
0121 }
0122 template<typename Solist, typename T, typename U>
0123 bool operator!=( const flist_iterator<Solist,T>& i, const flist_iterator<Solist,U>& j ) {
0124 return i.my_node_ptr != j.my_node_ptr;
0125 }
0126
0127
// Forward iterator over the real (non-dummy) elements of a split-ordered
// list.  Extends flist_iterator with a pointer to the owning list and skips
// dummy (bucket) nodes on every increment.
template<class Solist, typename Value>
class solist_iterator : public flist_iterator<Solist, Value>
{
    typedef flist_iterator<Solist, Value> base_type;
    typedef typename Solist::nodeptr_t nodeptr_t;
    using base_type::get_node_ptr;
    template <typename T, typename Allocator>
    friend class split_ordered_list;
    template<class M, typename V>
    friend class solist_iterator;
    template <typename Traits>
    friend class concurrent_unordered_base;
    template<typename M, typename T, typename U>
    friend bool operator==( const solist_iterator<M,T> &i, const solist_iterator<M,U> &j );
    template<typename M, typename T, typename U>
    friend bool operator!=( const solist_iterator<M,T>& i, const solist_iterator<M,U>& j );

    // Owning list; read by the comparison operators and by the container
    // (friend) when converting between iterator flavors.
    const Solist *my_list_ptr;
    // Only the containers (friends) may build an iterator from a raw node.
    solist_iterator(nodeptr_t pnode, const Solist *plist) : base_type(pnode), my_list_ptr(plist) {}

public:
    typedef typename Solist::value_type value_type;
    typedef typename Solist::difference_type difference_type;
    typedef typename Solist::pointer pointer;
    typedef typename Solist::reference reference;

    // NOTE(review): the default constructor leaves my_list_ptr uninitialized
    // (the base zeroes only the node pointer).
    solist_iterator() {}
    // Converting copy: allows a const iterator to be built from a mutable one.
    solist_iterator( const solist_iterator<Solist, typename Solist::value_type> &other )
        : base_type(other), my_list_ptr(other.my_list_ptr) {}

    solist_iterator& operator=( const solist_iterator<Solist, typename Solist::value_type> &other ) {
        base_type::my_node_ptr = other.get_node_ptr();
        my_list_ptr = other.my_list_ptr;
        return *this;
    }

    reference operator*() const {
        return this->base_type::operator*();
    }

    pointer operator->() const {
        return (&**this);
    }

    // Pre-increment: advance at least one node, then keep advancing while
    // the current node is a dummy (bucket anchor) node.
    solist_iterator& operator++() {
        do ++(*(base_type *)this);
        while (get_node_ptr() != NULL && get_node_ptr()->is_dummy());

        return (*this);
    }

    // Post-increment.  The trailing dummy check is redundant (the
    // pre-increment above already skips dummies) but harmless.
    solist_iterator operator++(int) {
        solist_iterator tmp = *this;
        do ++*this;
        while (get_node_ptr() != NULL && get_node_ptr()->is_dummy());

        return (tmp);
    }
};
0187
0188 template<typename Solist, typename T, typename U>
0189 bool operator==( const solist_iterator<Solist,T> &i, const solist_iterator<Solist,U> &j ) {
0190 return i.my_node_ptr == j.my_node_ptr && i.my_list_ptr == j.my_list_ptr;
0191 }
0192 template<typename Solist, typename T, typename U>
0193 bool operator!=( const solist_iterator<Solist,T>& i, const solist_iterator<Solist,U>& j ) {
0194 return i.my_node_ptr != j.my_node_ptr || i.my_list_ptr != j.my_list_ptr;
0195 }
0196
0197
0198 typedef size_t sokey_t;
0199
0200
0201
// Split-ordered list: a singly-linked list whose nodes are kept sorted by
// their sokey_t order key.  Insertion is lock-free (compare-and-swap on the
// predecessor's next pointer); erasure and clear() use plain writes and so
// need external synchronization.  The list always starts with a dummy head
// node (order key 0); further dummy nodes act as bucket anchors for the
// hash table built on top (see concurrent_unordered_base).
template <typename T, typename Allocator>
class split_ordered_list
{
public:
    typedef split_ordered_list<T, Allocator> self_type;

    // Allocator re-bound to the element type T
    typedef typename tbb::internal::allocator_rebind<Allocator, T>::type allocator_type;

    struct node;
    typedef node *nodeptr_t;

    // Standard container typedefs, derived from the allocator
    typedef typename tbb::internal::allocator_traits<allocator_type>::value_type value_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::size_type size_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::difference_type difference_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::pointer pointer;
    typedef typename tbb::internal::allocator_traits<allocator_type>::const_pointer const_pointer;

    typedef value_type& reference;
    typedef const value_type& const_reference;

    // "Real" iterators skip dummy nodes; "raw" iterators visit every node.
    typedef solist_iterator<self_type, const value_type> const_iterator;
    typedef solist_iterator<self_type, value_type> iterator;
    typedef flist_iterator<self_type, const value_type> raw_const_iterator;
    typedef flist_iterator<self_type, value_type> raw_iterator;

    // List node: next pointer, element storage, and split-order key.
    // Nodes are allocated raw; the element is placement-new constructed only
    // for real (non-dummy) nodes -- see create_node*/destroy_node.
    struct node : tbb::internal::no_assign
    {
    private:
        // Never constructed directly: nodes are allocated raw and init()'ed.
        node();
    public:
        // (Re)sets the order key and detaches the node from any successor.
        void init(sokey_t order_key) {
            my_order_key = order_key;
            my_next = NULL;
        }

        sokey_t get_order_key() const {
            return my_order_key;
        }

        // Raw element storage (may be unconstructed for dummy nodes).
        value_type* storage() {
            return reinterpret_cast<value_type*>(&my_element);
        }

        value_type& value() {
            return *storage();
        }

        // Atomically links new_node after this node if my_next still equals
        // current_node.  Returns new_node on success; otherwise the node
        // actually found in my_next, so the caller can retry.
        nodeptr_t atomic_set_next(nodeptr_t new_node, nodeptr_t current_node)
        {
            // Try to change my_next from current_node to new_node
            nodeptr_t exchange_node = tbb::internal::as_atomic(my_next).compare_and_swap(new_node, current_node);

            if (exchange_node == current_node)
            {
                // CAS succeeded; new_node is now linked in
                return new_node;
            }
            else
            {
                // CAS lost the race; report what is linked instead
                return exchange_node;
            }
        }

        // Dummy (bucket anchor) nodes have even order keys; real elements
        // have bit 0 set.
        bool is_dummy() const {
            return (my_order_key & 0x1) == 0;
        }

        nodeptr_t my_next;      // next node in split-order
        value_type my_element;  // element storage; constructed only for real nodes
        sokey_t my_order_key;   // split-order key of this node
    };

    // Allocates a node WITHOUT constructing an element -- used for the head
    // and for dummy bucket nodes.
    nodeptr_t create_node(sokey_t order_key) {
        nodeptr_t pnode = my_node_allocator.allocate(1);
        pnode->init(order_key);
        return (pnode);
    }

    // Allocates a node and copy/move-constructs its element from t.  On
    // exception the raw memory is returned to the allocator and the
    // exception is rethrown.
    template<typename Arg>
    nodeptr_t create_node(sokey_t order_key, __TBB_FORWARDING_REF(Arg) t,
                          tbb::internal::true_type=tbb::internal::true_type()){
        nodeptr_t pnode = my_node_allocator.allocate(1);

        __TBB_TRY {
            new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
            pnode->init(order_key);
        } __TBB_CATCH(...) {
            my_node_allocator.deallocate(pnode, 1);
            __TBB_RETHROW();
        }

        return (pnode);
    }

    // Compile-time dispatch stub for the overload above; must never be run.
    template<typename Arg>
    nodeptr_t create_node(sokey_t, __TBB_FORWARDING_REF(Arg),
                          tbb::internal::false_type){
        __TBB_ASSERT(false, "This compile-time helper should never get called");
        return nodeptr_t();
    }

    // Allocates a node and constructs its element in place from args.
    // NOTE: init() is NOT called here -- the caller is responsible for
    // setting the order key before the node is linked into a list.
    template<typename __TBB_PARAMETER_PACK Args>
    nodeptr_t create_node_v( __TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args){
        nodeptr_t pnode = my_node_allocator.allocate(1);

        __TBB_TRY {
            new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
        } __TBB_CATCH(...) {
            my_node_allocator.deallocate(pnode, 1);
            __TBB_RETHROW();
        }

        return (pnode);
    }

    // An empty list is a single dummy head node with order key 0.
    split_ordered_list(allocator_type a = allocator_type())
        : my_node_allocator(a), my_element_count(0)
    {
        my_head = create_node(sokey_t(0));
    }

    ~split_ordered_list()
    {
        // Destroy every node after the head...
        clear();

        // ...then the (now detached) head itself
        nodeptr_t pnode = my_head;
        my_head = NULL;

        __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");

        destroy_node(pnode);
    }

    allocator_type get_allocator() const {
        return (my_node_allocator);
    }

    // Removes and destroys every node after the head.  Plain (non-atomic)
    // pointer writes: callers must ensure exclusive access.
    void clear() {
        nodeptr_t pnext;
        nodeptr_t pnode = my_head;

        __TBB_ASSERT(my_head != NULL, "Invalid head list node");
        // Detach the whole chain from the head first
        pnext = pnode->my_next;
        pnode->my_next = NULL;
        pnode = pnext;

        while (pnode != NULL)
        {
            pnext = pnode->my_next;
            destroy_node(pnode);
            pnode = pnext;
        }

        my_element_count = 0;
    }

    // Iterator to the first real (non-dummy) element
    iterator begin() {
        return first_real_iterator(raw_begin());
    }

    const_iterator begin() const {
        return first_real_iterator(raw_begin());
    }

    iterator end() {
        return (iterator(0, this));
    }

    const_iterator end() const {
        return (const_iterator(0, this));
    }

    const_iterator cbegin() const {
        return (((const self_type *)this)->begin());
    }

    const_iterator cend() const {
        return (((const self_type *)this)->end());
    }

    // True when no real elements are present (dummies are never counted)
    bool empty() const {
        return (my_element_count == 0);
    }

    size_type size() const {
        return my_element_count;
    }

    size_type max_size() const {
        return my_node_allocator.max_size();
    }

    // Swaps the list contents.  NOTE: the node allocators are not swapped
    // here; allocator propagation is handled by the owning container.
    void swap(self_type& other)
    {
        if (this == &other)
        {
            // Self-swap is a no-op
            return;
        }

        std::swap(my_element_count, other.my_element_count);
        std::swap(my_head, other.my_head);
    }

    // Raw traversal: starts at the dummy head and includes dummy nodes

    raw_iterator raw_begin() {
        return raw_iterator(my_head);
    }

    raw_const_iterator raw_begin() const {
        return raw_const_iterator(my_head);
    }

    raw_iterator raw_end() {
        return raw_iterator(0);
    }

    raw_const_iterator raw_end() const {
        return raw_const_iterator(0);
    }

    static sokey_t get_order_key(const raw_const_iterator& it) {
        return it.get_node_ptr()->get_order_key();
    }

    // As get_order_key, but maps the end iterator to the maximal key value
    static sokey_t get_safe_order_key(const raw_const_iterator& it) {
        if( !it.get_node_ptr() ) return ~sokey_t(0);
        return it.get_node_ptr()->get_order_key();
    }

    // Converts a raw iterator to a real one; must not reference a dummy node
    iterator get_iterator(raw_iterator it) {
        __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
        return iterator(it.get_node_ptr(), this);
    }

    const_iterator get_iterator(raw_const_iterator it) const {
        __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
        return const_iterator(it.get_node_ptr(), this);
    }

    // Strips constness from a raw iterator (overload chosen on non-const *this)
    raw_iterator get_iterator(raw_const_iterator it) {
        return raw_iterator(it.get_node_ptr());
    }

    // Strips constness from a real iterator
    static iterator get_iterator(const_iterator it) {
        return iterator(it.my_node_ptr, it.my_list_ptr);
    }

    // Skips forward past dummy nodes to the first real element at or after it
    iterator first_real_iterator(raw_iterator it)
    {
        while (it != raw_end() && it.get_node_ptr()->is_dummy())
            ++it;

        return iterator(it.get_node_ptr(), this);
    }

    const_iterator first_real_iterator(raw_const_iterator it) const
    {
        while (it != raw_end() && it.get_node_ptr()->is_dummy())
            ++it;

        return const_iterator(it.get_node_ptr(), this);
    }

    // Destroys the node (running the element's destructor only for real
    // nodes -- dummies never constructed one) and frees its memory.
    void destroy_node(nodeptr_t pnode) {
        if (!pnode->is_dummy()) my_node_allocator.destroy(pnode);
        my_node_allocator.deallocate(pnode, 1);
    }

    // Tries to link new_node between previous and current_node.  Returns
    // new_node on success, or the node that now follows previous if another
    // thread won the race.
    static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node) {
        new_node->my_next = current_node;
        return previous->atomic_set_next(new_node, current_node);
    }

    // Inserts pnode between it and next.  On success returns (iterator,
    // true) and writes the result of the atomic increment of the element
    // count to *new_count (used by the caller for load-factor decisions);
    // on a lost race returns (end(), false) and leaves pnode untouched.
    std::pair<iterator, bool> try_insert(raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
    {
        nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());

        if (inserted_node == pnode)
        {
            // Insertion succeeded: verify ordering (debug) and bump the count
            check_range(it, next);
            *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment();
            return std::pair<iterator, bool>(iterator(pnode, this), true);
        }
        else
        {
            return std::pair<iterator, bool>(end(), false);
        }
    }

    // Inserts a new dummy node with the given order key after it, keeping
    // the list sorted.  If another thread concurrently inserts a dummy with
    // the same key, the local node is discarded and the existing one used.
    raw_iterator insert_dummy(raw_iterator it, sokey_t order_key)
    {
        raw_iterator last = raw_end();
        raw_iterator where = it;

        __TBB_ASSERT(where != last, "Invalid head node");

        // Search starts at the node after it
        ++where;

        // Create the dummy up front to keep the CAS window small
        nodeptr_t dummy_node = create_node(order_key);

        for (;;)
        {
            __TBB_ASSERT(it != last, "Invalid head list node");

            // The insertion point is reached when the next node's key is
            // greater than order_key (or the list ends)
            if (where == last || get_order_key(where) > order_key)
            {
                __TBB_ASSERT(get_order_key(it) < order_key, "Invalid node order in the list");

                // Try to link the dummy between it and where
                nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());

                if (inserted_node == dummy_node)
                {
                    // Insertion succeeded
                    check_range(it, where);
                    return raw_iterator(dummy_node);
                }
                else
                {
                    // A concurrent insertion changed it->my_next: rescan from
                    // it, since the newly linked node may itself be the
                    // insertion point or an equal-key dummy
                    where = it;
                    ++where;
                    continue;
                }
            }
            else if (get_order_key(where) == order_key)
            {
                // Another thread already inserted this dummy: use theirs
                destroy_node(dummy_node);
                return where;
            }

            // Keep walking the list
            it = where;
            ++where;
        }

    }

    // Unlinks the node at where (which must directly follow previous) with
    // a plain write -- NOT safe against concurrent mutation.  Advances
    // where past the removed node and returns it.
    nodeptr_t erase_node_impl(raw_iterator previous, raw_const_iterator& where) {
        nodeptr_t pnode = (where++).get_node_ptr();
        nodeptr_t prevnode = previous.get_node_ptr();
        __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
        prevnode->my_next = pnode->my_next;
        return pnode;
    }

    // Unlink and destroy the node (true_type tag)
    void erase_node(raw_iterator previous, raw_const_iterator& where,
                    tbb::internal::true_type)
    {
        nodeptr_t pnode = erase_node_impl(previous, where);
        destroy_node(pnode);
    }

    // Unlink only; the node's ownership passes to the caller (false_type
    // tag -- presumably used when the node is being extracted; verify at
    // call sites)
    void erase_node(raw_iterator previous, raw_const_iterator& where,
                    tbb::internal::false_type)
    {
        erase_node_impl(previous, where);
    }

    void erase_node(raw_iterator previous, raw_const_iterator& where) {
        erase_node(previous, where, tbb::internal::true_type());
    }

    // Erases the element at where, decrements the element count, and
    // returns an iterator to the next real element
    template<typename AllowDestroy>
    iterator erase_node(raw_iterator previous, const_iterator where, AllowDestroy)
    {
        raw_const_iterator it = where;
        erase_node(previous, it, AllowDestroy());
        my_element_count--;

        return get_iterator(first_real_iterator(it));
    }

    iterator erase_node(raw_iterator previous, const_iterator& where) {
        return erase_node(previous, where, tbb::internal::true_type());
    }

    // Moves the contents of source into this list: every node after
    // source's head (dummy or real) is re-created here in the same order
    // and the original is erased from source.  Each erased node directly
    // follows source's head at the moment of erasure, so the head is used
    // as the erase anchor throughout.  NOTE: neither list's
    // my_element_count is adjusted here; the caller manages the counts.
    void move_all(self_type& source)
    {
        raw_const_iterator first = source.raw_begin();
        raw_const_iterator last = source.raw_end();

        if (first == last)
            return;

        nodeptr_t previous_node = my_head;
        // begin_iterator keeps source's head; iteration starts after it
        raw_const_iterator begin_iterator = first++;

        for (raw_const_iterator it = first; it != last;)
        {
            nodeptr_t pnode = it.get_node_ptr();

            // Clone the node: key-only for dummies, key+element otherwise
            nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key()) : create_node(pnode->get_order_key(), pnode->my_element);
            previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
            __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");
            raw_const_iterator where = it++;
            source.erase_node(get_iterator(begin_iterator), where);
        }
        check_range();
    }


private:
    // The hash-table layer needs direct access to nodes and raw iterators
    template <typename Traits>
    friend class concurrent_unordered_base;

    // Debug-only check that [first, last) is sorted by order key
    void check_range( raw_iterator first, raw_iterator last )
    {
#if TBB_USE_ASSERT
        for (raw_iterator it = first; it != last; ++it)
        {
            raw_iterator next = it;
            ++next;

            __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it), "!!! List order inconsistency !!!");
        }
#else
        tbb::internal::suppress_unused_warning(first, last);
#endif
    }
    // Debug-only check of the whole list
    void check_range()
    {
#if TBB_USE_ASSERT
        check_range( raw_begin(), raw_end() );
#endif
    }

    typename tbb::internal::allocator_rebind<allocator_type, node>::type my_node_allocator; // allocator rebound to node
    size_type my_element_count; // number of real elements (atomically bumped in try_insert)
    nodeptr_t my_head;          // dummy head node with order key 0; never NULL while alive
};
0699
0700 #if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
0701 #pragma warning(push)
0702 #pragma warning(disable: 4127)
0703 #endif
0704
0705 template <typename Traits>
0706 class concurrent_unordered_base : public Traits
0707 {
0708 protected:
0709
0710 typedef concurrent_unordered_base<Traits> self_type;
0711 typedef typename Traits::value_type value_type;
0712 typedef typename Traits::key_type key_type;
0713 typedef typename Traits::hash_compare hash_compare;
0714 typedef typename Traits::allocator_type allocator_type;
0715 typedef typename hash_compare::hasher hasher;
0716 typedef typename hash_compare::key_equal key_equal;
0717
0718 typedef typename tbb::internal::allocator_traits<allocator_type>::size_type size_type;
0719 typedef typename tbb::internal::allocator_traits<allocator_type>::difference_type difference_type;
0720 typedef typename tbb::internal::allocator_traits<allocator_type>::pointer pointer;
0721 typedef typename tbb::internal::allocator_traits<allocator_type>::const_pointer const_pointer;
0722
0723 typedef typename allocator_type::value_type& reference;
0724 typedef const typename allocator_type::value_type& const_reference;
0725
0726 typedef split_ordered_list<value_type, typename Traits::allocator_type> solist_t;
0727 typedef typename solist_t::nodeptr_t nodeptr_t;
0728
0729 typedef typename solist_t::raw_iterator raw_iterator;
0730 typedef typename solist_t::raw_const_iterator raw_const_iterator;
0731 typedef typename solist_t::iterator iterator;
0732 typedef typename solist_t::const_iterator const_iterator;
0733 typedef iterator local_iterator;
0734 typedef const_iterator const_local_iterator;
0735 #if __TBB_UNORDERED_NODE_HANDLE_PRESENT
0736 typedef typename Traits::node_type node_type;
0737 #endif
0738 using Traits::my_hash_compare;
0739 using Traits::get_key;
0740 using Traits::allow_multimapping;
0741
0742 static const size_type initial_bucket_number = 8;
0743
0744 private:
0745 template<typename OtherTraits>
0746 friend class concurrent_unordered_base;
0747
0748 typedef std::pair<iterator, iterator> pairii_t;
0749 typedef std::pair<const_iterator, const_iterator> paircc_t;
0750
0751 static size_type const pointers_per_table = sizeof(size_type) * 8;
0752 static const size_type initial_bucket_load = 4;
0753
0754 struct call_internal_clear_on_exit{
0755 concurrent_unordered_base* my_instance;
0756 call_internal_clear_on_exit(concurrent_unordered_base* instance) : my_instance(instance) {}
0757 void dismiss(){ my_instance = NULL;}
0758 ~call_internal_clear_on_exit(){
0759 if (my_instance){
0760 my_instance->internal_clear();
0761 }
0762 }
0763 };
0764 protected:
0765
0766 concurrent_unordered_base(size_type n_of_buckets = initial_bucket_number,
0767 const hash_compare& hc = hash_compare(), const allocator_type& a = allocator_type())
0768 : Traits(hc), my_solist(a),
0769 my_allocator(a), my_maximum_bucket_size((float) initial_bucket_load)
0770 {
0771 if( n_of_buckets == 0) ++n_of_buckets;
0772 my_number_of_buckets = size_type(1)<<__TBB_Log2((uintptr_t)n_of_buckets*2-1);
0773 internal_init();
0774 }
0775
0776 concurrent_unordered_base(const concurrent_unordered_base& right, const allocator_type& a)
0777 : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
0778 {
0779 internal_init();
0780 internal_copy(right);
0781 }
0782
0783 concurrent_unordered_base(const concurrent_unordered_base& right)
0784 : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator())
0785 {
0786
0787 internal_init();
0788 internal_copy(right);
0789 }
0790
0791 #if __TBB_CPP11_RVALUE_REF_PRESENT
0792 concurrent_unordered_base(concurrent_unordered_base&& right)
0793 : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator()),
0794 my_maximum_bucket_size(float(initial_bucket_load))
0795 {
0796 my_number_of_buckets = initial_bucket_number;
0797 internal_init();
0798 swap(right);
0799 }
0800
0801 concurrent_unordered_base(concurrent_unordered_base&& right, const allocator_type& a)
0802 : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
0803 {
0804 call_internal_clear_on_exit clear_buckets_on_exception(this);
0805
0806 internal_init();
0807 if (a == right.get_allocator()){
0808 my_number_of_buckets = initial_bucket_number;
0809 my_maximum_bucket_size = float(initial_bucket_load);
0810 this->swap(right);
0811 }else{
0812 my_maximum_bucket_size = right.my_maximum_bucket_size;
0813 my_number_of_buckets = right.my_number_of_buckets;
0814 my_solist.my_element_count = right.my_solist.my_element_count;
0815
0816 if (! right.my_solist.empty()){
0817 nodeptr_t previous_node = my_solist.my_head;
0818
0819
0820 for (raw_const_iterator it = ++(right.my_solist.raw_begin()), last = right.my_solist.raw_end(); it != last; ++it)
0821 {
0822 const nodeptr_t pnode = it.get_node_ptr();
0823 nodeptr_t node;
0824 if (pnode->is_dummy()) {
0825 node = my_solist.create_node(pnode->get_order_key());
0826 size_type bucket = __TBB_ReverseBits(pnode->get_order_key()) % my_number_of_buckets;
0827 set_bucket(bucket, node);
0828 }else{
0829 node = my_solist.create_node(pnode->get_order_key(), std::move(pnode->my_element));
0830 }
0831
0832 previous_node = my_solist.try_insert_atomic(previous_node, node, NULL);
0833 __TBB_ASSERT(previous_node != NULL, "Insertion of node failed. Concurrent inserts in constructor ?");
0834 }
0835 my_solist.check_range();
0836 }
0837 }
0838
0839 clear_buckets_on_exception.dismiss();
0840 }
0841
0842 #endif
0843
0844 concurrent_unordered_base& operator=(const concurrent_unordered_base& right) {
0845 if (this != &right)
0846 internal_copy(right);
0847 return (*this);
0848 }
0849
0850 #if __TBB_CPP11_RVALUE_REF_PRESENT
0851 concurrent_unordered_base& operator=(concurrent_unordered_base&& other)
0852 {
0853 if(this != &other){
0854 typedef typename tbb::internal::allocator_traits<allocator_type>::propagate_on_container_move_assignment pocma_t;
0855 if(pocma_t::value || this->my_allocator == other.my_allocator) {
0856 concurrent_unordered_base trash (std::move(*this));
0857 swap(other);
0858 if (pocma_t::value) {
0859 using std::swap;
0860
0861 swap(this->my_solist.my_node_allocator, other.my_solist.my_node_allocator);
0862 swap(this->my_allocator, other.my_allocator);
0863 }
0864 } else {
0865 concurrent_unordered_base moved_copy(std::move(other),this->my_allocator);
0866 this->swap(moved_copy);
0867 }
0868 }
0869 return *this;
0870 }
0871
0872 #endif
0873
0874 #if __TBB_INITIALIZER_LISTS_PRESENT
0875
0876 concurrent_unordered_base& operator=(std::initializer_list<value_type> il)
0877 {
0878 this->clear();
0879 this->insert(il.begin(),il.end());
0880 return (*this);
0881 }
0882 #endif
0883
0884
0885 ~concurrent_unordered_base() {
0886
0887 internal_clear();
0888 }
0889
0890 #if __TBB_UNORDERED_NODE_HANDLE_PRESENT
0891 template<typename SourceType>
0892 void internal_merge(SourceType& source) {
0893 typedef typename SourceType::iterator source_iterator;
0894 __TBB_STATIC_ASSERT((tbb::internal::is_same_type<node_type,
0895 typename SourceType::node_type>::value),
0896 "Incompatible containers cannot be merged");
0897
0898 for(source_iterator it = source.begin(); it != source.end();) {
0899 source_iterator where = it++;
0900 if (allow_multimapping || find(get_key(*where)) == end()) {
0901 std::pair<node_type, raw_iterator> extract_result = source.internal_extract(where);
0902
0903
0904 sokey_t old_order_key = extract_result.first.my_node->get_order_key();
0905
0906
0907
0908 if (!insert(std::move(extract_result.first)).second) {
0909 raw_iterator next = extract_result.second;
0910 raw_iterator current = next++;
0911
0912
0913 extract_result.first.my_node->init(old_order_key);
0914
0915 __TBB_ASSERT(extract_result.first.my_node->get_order_key() >= current.get_node_ptr()->get_order_key(),
0916 "Wrong nodes order in source container");
0917 __TBB_ASSERT(next==source.my_solist.raw_end() ||
0918 extract_result.first.my_node->get_order_key() <= next.get_node_ptr()->get_order_key(),
0919 "Wrong nodes order in source container");
0920
0921 size_t new_count = 0;
0922 bool insert_result =
0923 source.my_solist.try_insert(current, next, extract_result.first.my_node, &new_count).second;
0924 __TBB_ASSERT_EX(insert_result, "Return to source must be successful. "
0925 "Changing source container while merging is unsafe.");
0926 }
0927 extract_result.first.deactivate();
0928 }
0929 }
0930 }
0931 #endif
0932
0933 public:
0934 allocator_type get_allocator() const {
0935 return my_solist.get_allocator();
0936 }
0937
0938
0939 bool empty() const {
0940 return my_solist.empty();
0941 }
0942
0943 size_type size() const {
0944 return my_solist.size();
0945 }
0946
0947 size_type max_size() const {
0948 return my_solist.max_size();
0949 }
0950
0951
0952 iterator begin() {
0953 return my_solist.begin();
0954 }
0955
0956 const_iterator begin() const {
0957 return my_solist.begin();
0958 }
0959
0960 iterator end() {
0961 return my_solist.end();
0962 }
0963
0964 const_iterator end() const {
0965 return my_solist.end();
0966 }
0967
0968 const_iterator cbegin() const {
0969 return my_solist.cbegin();
0970 }
0971
0972 const_iterator cend() const {
0973 return my_solist.cend();
0974 }
0975
0976
0977 class const_range_type : tbb::internal::no_assign {
0978 const concurrent_unordered_base &my_table;
0979 raw_const_iterator my_begin_node;
0980 raw_const_iterator my_end_node;
0981 mutable raw_const_iterator my_midpoint_node;
0982 public:
0983
0984 typedef typename concurrent_unordered_base::size_type size_type;
0985 typedef typename concurrent_unordered_base::value_type value_type;
0986 typedef typename concurrent_unordered_base::reference reference;
0987 typedef typename concurrent_unordered_base::difference_type difference_type;
0988 typedef typename concurrent_unordered_base::const_iterator iterator;
0989
0990
0991 bool empty() const {return my_begin_node == my_end_node;}
0992
0993
0994 bool is_divisible() const {
0995 return my_midpoint_node != my_end_node;
0996 }
0997
0998 const_range_type( const_range_type &r, split ) :
0999 my_table(r.my_table), my_end_node(r.my_end_node)
1000 {
1001 r.my_end_node = my_begin_node = r.my_midpoint_node;
1002 __TBB_ASSERT( !empty(), "Splitting despite the range is not divisible" );
1003 __TBB_ASSERT( !r.empty(), "Splitting despite the range is not divisible" );
1004 set_midpoint();
1005 r.set_midpoint();
1006 }
1007
1008 const_range_type( const concurrent_unordered_base &a_table ) :
1009 my_table(a_table), my_begin_node(a_table.my_solist.begin()),
1010 my_end_node(a_table.my_solist.end())
1011 {
1012 set_midpoint();
1013 }
1014 iterator begin() const { return my_table.my_solist.get_iterator(my_begin_node); }
1015 iterator end() const { return my_table.my_solist.get_iterator(my_end_node); }
1016
1017 size_type grainsize() const { return 1; }
1018
1019
1020 void set_midpoint() const {
1021 if( my_begin_node == my_end_node )
1022 my_midpoint_node = my_end_node;
1023 else {
1024 sokey_t begin_key = solist_t::get_safe_order_key(my_begin_node);
1025 sokey_t end_key = solist_t::get_safe_order_key(my_end_node);
1026 size_t mid_bucket = __TBB_ReverseBits( begin_key + (end_key-begin_key)/2 ) % my_table.my_number_of_buckets;
1027 while ( !my_table.is_initialized(mid_bucket) ) mid_bucket = my_table.get_parent(mid_bucket);
1028 if(__TBB_ReverseBits(mid_bucket) > begin_key) {
1029
1030 my_midpoint_node = my_table.my_solist.first_real_iterator(my_table.get_bucket( mid_bucket ));
1031 }
1032 else {
1033
1034 my_midpoint_node = my_end_node;
1035 }
1036 #if TBB_USE_ASSERT
1037 {
1038 sokey_t mid_key = solist_t::get_safe_order_key(my_midpoint_node);
1039 __TBB_ASSERT( begin_key < mid_key, "my_begin_node is after my_midpoint_node" );
1040 __TBB_ASSERT( mid_key <= end_key, "my_midpoint_node is after my_end_node" );
1041 }
1042 #endif
1043 }
1044 }
1045 };
1046
1047 class range_type : public const_range_type {
1048 public:
1049 typedef typename concurrent_unordered_base::iterator iterator;
1050
1051 range_type( range_type &r, split ) : const_range_type( r, split() ) {}
1052
1053 range_type( const concurrent_unordered_base &a_table ) : const_range_type(a_table) {}
1054
1055 iterator begin() const { return solist_t::get_iterator( const_range_type::begin() ); }
1056 iterator end() const { return solist_t::get_iterator( const_range_type::end() ); }
1057 };
1058
1059 range_type range() {
1060 return range_type( *this );
1061 }
1062
1063 const_range_type range() const {
1064 return const_range_type( *this );
1065 }
1066
1067
1068 std::pair<iterator, bool> insert(const value_type& value) {
1069 return internal_insert<tbb::internal::true_type,
1070 tbb::internal::true_type>(value);
1071 }
1072
1073 iterator insert(const_iterator, const value_type& value) {
1074
1075 return insert(value).first;
1076 }
1077
1078 #if __TBB_CPP11_RVALUE_REF_PRESENT
1079 std::pair<iterator, bool> insert(value_type&& value) {
1080 return internal_insert<tbb::internal::true_type,
1081 tbb::internal::true_type>(std::move(value));
1082 }
1083
1084 iterator insert(const_iterator, value_type&& value) {
1085
1086 return insert(std::move(value)).first;
1087 }
1088 #endif
1089
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    //! Re-insert a node previously extracted from a compatible container.
    //! On success the handle releases ownership of the node; on failure
    //! (duplicate key in a unique container) `nh` keeps its node and the
    //! iterator of the clashing element is returned with false.
    std::pair<iterator, bool> insert(node_type&& nh) {
        if (!nh.empty()) {
            nodeptr_t handled_node = nh.my_node;
            // AllowCreate=false: reuse the handle's node instead of building
            // a new one; AllowDestroy=false: keep the node alive so `nh`
            // still owns it if the insertion fails.
            std::pair<iterator, bool> insert_result =
                internal_insert<tbb::internal::false_type,
                                tbb::internal::false_type>
                                (handled_node->my_element, handled_node);
            if (insert_result.second)
                nh.deactivate(); // ownership transferred to the container
            return insert_result;
        }
        return std::pair<iterator, bool>(end(), false); // empty handle: no-op
    }

    //! Hint-taking node-handle overload; the hint is ignored.
    iterator insert(const_iterator, node_type&& nh) {
        return insert(std::move(nh)).first;
    }
#endif /*__TBB_UNORDERED_NODE_HANDLE_PRESENT*/
1109
#if __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT
    //! Construct an element in place from args and insert it. The node is
    //! built before the position is known, so on a duplicate-key failure
    //! internal_insert destroys it (AllowDestroy=true).
    template<typename... Args>
    std::pair<iterator, bool> emplace(Args&&... args) {
        nodeptr_t pnode = my_solist.create_node_v(tbb::internal::forward<Args>(args)...);
        // AllowCreate=false: the node already exists; do not build another.
        return internal_insert<tbb::internal::false_type,
                               tbb::internal::true_type>(pnode->my_element, pnode);
    }

    //! Hint-taking emplace; the hint is ignored.
    template<typename... Args>
    iterator emplace_hint(const_iterator, Args&&... args) {
        return emplace(tbb::internal::forward<Args>(args)...).first;
    }
#endif /*__TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT*/
1125
1126
1127 template<class Iterator>
1128 void insert(Iterator first, Iterator last) {
1129 for (Iterator it = first; it != last; ++it)
1130 insert(*it);
1131 }
1132
#if __TBB_INITIALIZER_LISTS_PRESENT
    //! Insert all elements of the initializer list.
    void insert(std::initializer_list<value_type> il) {
        insert(il.begin(), il.end());
    }
#endif /*__TBB_INITIALIZER_LISTS_PRESENT*/
1139
    //! Erase the element at `where`. Not safe against concurrent
    //! modification. Returns the iterator produced by internal_erase,
    //! or end() if the element is not found.
    iterator unsafe_erase(const_iterator where) {
        return internal_erase(where);
    }
1143
1144 iterator unsafe_erase(const_iterator first, const_iterator last) {
1145 while (first != last)
1146 unsafe_erase(first++);
1147 return my_solist.get_iterator(first);
1148 }
1149
1150 size_type unsafe_erase(const key_type& key) {
1151 pairii_t where = equal_range(key);
1152 size_type item_count = internal_distance(where.first, where.second);
1153 unsafe_erase(where.first, where.second);
1154 return item_count;
1155 }
1156
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    //! Unlink the element at `where` and return it as an owning node
    //! handle. Not safe against concurrent modification.
    node_type unsafe_extract(const_iterator where) {
        return internal_extract(where).first;
    }

    //! Unlink one element matching `key`; returns an empty handle if the
    //! key is absent.
    node_type unsafe_extract(const key_type& key) {
        pairii_t where = equal_range(key);
        if (where.first == end()) return node_type(); // key not present
        return internal_extract(where.first).first;
    }
#endif /*__TBB_UNORDERED_NODE_HANDLE_PRESENT*/
1168
    //! Swap the full contents, functors, and bucket tables with `right`.
    //! Not safe against concurrent operations on either table.
    void swap(concurrent_unordered_base& right) {
        if (this != &right) { // self-swap is a no-op
            std::swap(my_hash_compare, right.my_hash_compare);
            my_solist.swap(right.my_solist);
            internal_swap_buckets(right);
            std::swap(my_number_of_buckets, right.my_number_of_buckets);
            std::swap(my_maximum_bucket_size, right.my_maximum_bucket_size);
        }
    }
1178
1179
    //! Return a copy of the hash functor.
    hasher hash_function() const {
        return my_hash_compare.my_hash_object;
    }

    //! Return a copy of the key-equality functor.
    key_equal key_eq() const {
        return my_hash_compare.my_key_compare_object;
    }
1187
    //! Remove all elements (not concurrency-safe) and restore the bucket
    //! table to its initial state: a single dummy node heading bucket 0.
    void clear() {
        // Clear the underlying list of elements and dummy nodes.
        my_solist.clear();

        // Free all bucket segments.
        internal_clear();

        // internal_clear() must have deallocated every segment, so bucket
        // 0's segment pointer is NULL again; re-publish the list's leading
        // dummy node as the head of bucket 0 (same as internal_init()).
        __TBB_ASSERT(my_buckets[0] == NULL, NULL);
        raw_iterator dummy_node = my_solist.raw_begin();
        set_bucket(0, dummy_node);
    }
1200
1201
    //! Find an element with the given key; returns end() if absent.
    iterator find(const key_type& key) {
        return internal_find(key);
    }

    //! Const find; delegates to the non-const search, which may lazily
    //! initialize buckets — hence the const_cast.
    const_iterator find(const key_type& key) const {
        return const_cast<self_type*>(this)->internal_find(key);
    }
1209
1210 size_type count(const key_type& key) const {
1211 if(allow_multimapping) {
1212 paircc_t answer = equal_range(key);
1213 size_type item_count = internal_distance(answer.first, answer.second);
1214 return item_count;
1215 } else {
1216 return const_cast<self_type*>(this)->internal_find(key) == end()?0:1;
1217 }
1218 }
1219
    //! Range of elements equal to key as a [first, last) iterator pair.
    std::pair<iterator, iterator> equal_range(const key_type& key) {
        return internal_equal_range(key);
    }

    //! Const overload; const_cast is needed because the search may lazily
    //! initialize buckets.
    std::pair<const_iterator, const_iterator> equal_range(const key_type& key) const {
        return const_cast<self_type*>(this)->internal_equal_range(key);
    }
1227
1228
    //! Current number of buckets (grown only, in powers of two).
    size_type unsafe_bucket_count() const {
        return my_number_of_buckets;
    }

    //! Upper bound on the bucket count: the size of the largest segment.
    size_type unsafe_max_bucket_count() const {
        return segment_size(pointers_per_table-1);
    }
1236
1237 size_type unsafe_bucket_size(size_type bucket) {
1238 size_type item_count = 0;
1239 if (is_initialized(bucket)) {
1240 raw_iterator it = get_bucket(bucket);
1241 ++it;
1242 for (; it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy(); ++it)
1243 ++item_count;
1244 }
1245 return item_count;
1246 }
1247
1248 size_type unsafe_bucket(const key_type& key) const {
1249 sokey_t order_key = (sokey_t) my_hash_compare(key);
1250 size_type bucket = order_key % my_number_of_buckets;
1251 return bucket;
1252 }
1253
1254
    //! Iterator to the first real element of a bucket, or end() when the
    //! bucket was never initialized.
    local_iterator unsafe_begin(size_type bucket) {
        if (!is_initialized(bucket))
            return end();

        // Start from the bucket's dummy head and skip to the first element.
        raw_iterator it = get_bucket(bucket);
        return my_solist.first_real_iterator(it);
    }

    //! Const overload of unsafe_begin.
    const_local_iterator unsafe_begin(size_type bucket) const
    {
        if (!is_initialized(bucket))
            return end();

        raw_const_iterator it = get_bucket(bucket);
        return my_solist.first_real_iterator(it);
    }
1272
1273
1274
1275 local_iterator unsafe_end(size_type bucket)
1276 {
1277 if (!is_initialized(bucket))
1278 return end();
1279
1280 raw_iterator it = get_bucket(bucket);
1281
1282
1283 do ++it;
1284 while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());
1285
1286
1287 return my_solist.first_real_iterator(it);
1288 }
1289
1290
1291
1292 const_local_iterator unsafe_end(size_type bucket) const
1293 {
1294 if (!is_initialized(bucket))
1295 return end();
1296
1297 raw_const_iterator it = get_bucket(bucket);
1298
1299
1300 do ++it;
1301 while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());
1302
1303
1304 return my_solist.first_real_iterator(it);
1305 }
1306
1307 const_local_iterator unsafe_cbegin(size_type bucket) const {
1308 return ((const self_type *) this)->unsafe_begin(bucket);
1309 }
1310
1311 const_local_iterator unsafe_cend(size_type bucket) const {
1312 return ((const self_type *) this)->unsafe_end(bucket);
1313 }
1314
1315
    //! Average number of elements per bucket.
    float load_factor() const {
        return (float) size() / (float) unsafe_bucket_count();
    }

    //! Current load-factor limit used to trigger bucket-count growth.
    float max_load_factor() const {
        return my_maximum_bucket_size;
    }
1323
    //! Set the load-factor limit. NaN (detected by the newmax != newmax
    //! self-comparison) and negative values are rejected with an
    //! invalid_load_factor exception.
    void max_load_factor(float newmax) {
        if (newmax != newmax || newmax < 0)
            tbb::internal::throw_exception(tbb::internal::eid_invalid_load_factor);
        my_maximum_bucket_size = newmax;
    }
1329
1330
1331
1332
    //! Request at least `buckets` buckets. Only the (atomic) bucket count
    //! is raised here — to the next power of two at or above the request;
    //! the bucket heads themselves are created lazily by prepare_bucket().
    //! The bucket count never shrinks.
    void rehash(size_type buckets) {
        size_type current_buckets = my_number_of_buckets;
        if (current_buckets >= buckets)
            return;
        // 1 << Log2(2*buckets - 1) rounds `buckets` up to a power of two.
        my_number_of_buckets = size_type(1)<<__TBB_Log2((uintptr_t)buckets*2-1);
    }
1339
1340 private:
1341
1342
    //! Initialize the hash table: zero every segment pointer and point
    //! bucket 0 at the list's leading dummy node.
    void internal_init() {
        // Zero the array of segment pointers.
        // NOTE(review): memset over the atomic<raw_iterator*> array assumes
        // zero bytes are a valid representation of a NULL atomic pointer —
        // consistent with the NULL checks used throughout this class.
        memset(my_buckets, 0, sizeof(my_buckets));

        // Initialize bucket 0 with the list head's dummy node.
        raw_iterator dummy_node = my_solist.raw_begin();
        set_bucket(0, dummy_node);
    }
1351
1352 void internal_clear() {
1353 for (size_type index = 0; index < pointers_per_table; ++index) {
1354 if (my_buckets[index] != NULL) {
1355 size_type sz = segment_size(index);
1356 for (size_type index2 = 0; index2 < sz; ++index2)
1357 my_allocator.destroy(&my_buckets[index][index2]);
1358 my_allocator.deallocate(my_buckets[index], sz);
1359 my_buckets[index] = 0;
1360 }
1361 }
1362 }
1363
    //! Replace this table's contents with a copy of `right`. Assumes
    //! exclusive access to *this. If copying an element throws, the list is
    //! emptied and the exception rethrown (the table is left empty rather
    //! than partially copied; bucket counts were already taken from right).
    void internal_copy(const self_type& right) {
        clear();

        my_maximum_bucket_size = right.my_maximum_bucket_size;
        my_number_of_buckets = right.my_number_of_buckets;

        __TBB_TRY {
            insert(right.begin(), right.end());
            my_hash_compare = right.my_hash_compare;
        } __TBB_CATCH(...) {
            // Roll back to an empty list before propagating the error.
            my_solist.clear();
            __TBB_RETHROW();
        }
    }
1378
1379 void internal_swap_buckets(concurrent_unordered_base& right)
1380 {
1381
1382 for (size_type index = 0; index < pointers_per_table; ++index)
1383 {
1384 raw_iterator * iterator_pointer = my_buckets[index];
1385 my_buckets[index] = right.my_buckets[index];
1386 right.my_buckets[index] = iterator_pointer;
1387 }
1388 }
1389
1390
1391
1392 static size_type internal_distance(const_iterator first, const_iterator last)
1393 {
1394 size_type num = 0;
1395
1396 for (const_iterator it = first; it != last; ++it)
1397 ++num;
1398
1399 return num;
1400 }
1401
1402
    //! Core concurrent insertion into the split-ordered list.
    //! AllowCreate: whether a node may be built from `value` once the
    //!   position is known. AllowDestroy: whether a caller-provided node is
    //!   destroyed when the key turns out to be a duplicate.
    //! `pnode`: optional pre-built node (emplace / node-handle insert).
    //! Returns (iterator to inserted or clashing element, success flag).
    template<typename AllowCreate, typename AllowDestroy, typename ValueType>
    std::pair<iterator, bool> internal_insert(__TBB_FORWARDING_REF(ValueType) value, nodeptr_t pnode = NULL)
    {
        const key_type *pkey = &get_key(value);
        sokey_t hash_key = (sokey_t) my_hash_compare(*pkey);
        size_type new_count = 0;
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();
        __TBB_ASSERT(previous != last, "Invalid head node");

        if (pnode) {
            // Caller supplied the node; just stamp its split-order key.
            pnode->init(order_key);
        }

        // Scan the bucket's sublist for the insertion point; on a lost race
        // with a concurrent inserter, rescan from `previous`.
        for (raw_iterator where = previous;;)
        {
            ++where;
            if (where == last || solist_t::get_order_key(where) > order_key ||
                // Multimap: insert in front of the first element with an
                // equal key so that equal elements stay adjacent
                // (my_hash_compare on two keys returns false when equal).
                (allow_multimapping && solist_t::get_order_key(where) == order_key &&
                !my_hash_compare(get_key(*where), *pkey)))
            {
                if (!pnode) {
                    pnode = my_solist.create_node(order_key, tbb::internal::forward<ValueType>(value), AllowCreate());
                    // `value` may have been moved into the node; from here on
                    // the key must be read from the node itself.
                    pkey = &get_key(pnode->my_element);
                }

                // Try to link the node between `previous` and `where`.
                std::pair<iterator, bool> result = my_solist.try_insert(previous, where, pnode, &new_count);

                if (result.second)
                {
                    // Insertion succeeded; grow the table if the load factor
                    // was exceeded.
                    adjust_table_size(new_count, my_number_of_buckets);
                    return result;
                }
                else
                {
                    // try_insert failed: another thread changed the link
                    // between `previous` and `where`. Keep the already-built
                    // node (it is destroyed below only on a real duplicate)
                    // and rescan from `previous`.
                    where = previous;
                    continue;
                }
            }
            else if (!allow_multimapping && solist_t::get_order_key(where) == order_key &&
                !my_hash_compare(get_key(*where), *pkey))
            {
                // Duplicate key in a unique container: report the existing
                // element; dispose of the unused node if we own it.
                if (pnode && AllowDestroy::value)
                    my_solist.destroy_node(pnode);
                return std::pair<iterator, bool>(my_solist.get_iterator(where), false);
            }

            previous = where;
        }
    }
1465
1466
    //! Lookup: walk the bucket's sublist in split-order and return an
    //! iterator to the first element equal to `key`, or end() if absent.
    iterator internal_find(const key_type& key)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(key);
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator last = my_solist.raw_end();

        for (raw_iterator it = prepare_bucket(hash_key); it != last; ++it)
        {
            if (solist_t::get_order_key(it) > order_key)
            {
                // The list is sorted by split-order key: once we pass
                // order_key the element cannot appear later.
                return end();
            }
            else if (solist_t::get_order_key(it) == order_key)
            {
                // Equal order keys may still belong to different keys (hash
                // collision), so compare the keys themselves.
                // my_hash_compare(a, b) returns false for equal keys here.
                if (!my_hash_compare(get_key(*it), key))
                    return my_solist.get_iterator(it);
            }
        }

        return end();
    }
1493
1494
    //! Unlink and destroy the element at `it` (not concurrency-safe).
    //! Returns the iterator produced by erase_node, or end() if `it` is not
    //! found in its bucket's sublist.
    iterator internal_erase(const_iterator it)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(get_key(*it));
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();
        __TBB_ASSERT(previous != last, "Invalid head node");

        // Scan for `it` to obtain its predecessor, which erase_node needs
        // in order to unlink the node.
        for (raw_iterator where = previous; where != last; previous = where) {
            ++where;
            if (my_solist.get_iterator(where) == it)
                return my_solist.erase_node(previous, it);
        }
        return end();
    }
1510
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    //! Unlink the element at `it` without destroying it, returning it as an
    //! owning node handle together with the raw iterator that preceded the
    //! removed node. Returns an empty handle and end() if `it` is not found.
    std::pair<node_type, raw_iterator> internal_extract(const_iterator it) {
        sokey_t hash_key = sokey_t(my_hash_compare(get_key(*it)));
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();
        __TBB_ASSERT(previous != last, "Invalid head node");

        // Locate `it` to obtain its predecessor for unlinking.
        for(raw_iterator where = previous; where != last; previous = where) {
            ++where;
            if (my_solist.get_iterator(where) == it) {
                const_iterator result = it;
                // false_type: unlink only, do not destroy the node — the
                // returned node handle takes ownership of it.
                my_solist.erase_node(previous, it, tbb::internal::false_type());
                return std::pair<node_type, raw_iterator>( node_type(result.get_node_ptr()),
                                                           previous);
            }
        }
        // Not found. Construct the declared return type directly instead of
        // building a pair<node_type, iterator> and relying on the implicit
        // pair-to-pair conversion (as the previous code did).
        return std::pair<node_type, raw_iterator>(node_type(), end());
    }
#endif /*__TBB_UNORDERED_NODE_HANDLE_PRESENT*/
1530
1531
1532
    //! Return the [first, last) range of elements equal to `key`.
    //! In a unique container the range holds at most one element.
    pairii_t internal_equal_range(const key_type& key)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(key);
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator end_it = my_solist.raw_end();

        for (raw_iterator it = prepare_bucket(hash_key); it != end_it; ++it)
        {
            if (solist_t::get_order_key(it) > order_key)
            {
                // Sorted by split-order key: the key cannot appear later.
                return pairii_t(end(), end());
            }
            else if (solist_t::get_order_key(it) == order_key &&
                     !my_hash_compare(get_key(*it), key)) // keys equal
            {
                iterator first = my_solist.get_iterator(it);
                iterator last = first;
                // Equal elements are kept adjacent, so extend `last` past
                // them; in the unique case this advances exactly once.
                do ++last; while( allow_multimapping && last != end() && !my_hash_compare(get_key(*last), key) );
                return pairii_t(first, last);
            }
        }

        return pairii_t(end(), end());
    }
1558
1559
    //! Lazily initialize `bucket`: recursively ensure its parent bucket
    //! exists, then splice this bucket's dummy node into the list after the
    //! parent's dummy and publish it in the bucket table.
    void init_bucket(size_type bucket)
    {
        // Bucket 0 has no parent; it is set up by internal_init()/clear().
        __TBB_ASSERT( bucket != 0, "The first bucket must always be initialized");

        size_type parent_bucket = get_parent(bucket);

        // All parent buckets have to be initialized before this one.
        if (!is_initialized(parent_bucket))
            init_bucket(parent_bucket);

        raw_iterator parent = get_bucket(parent_bucket);

        // Create a dummy head for this bucket, ordered by the bucket's
        // (bit-reversed, low-bit-zero) dummy key, and publish it.
        raw_iterator dummy_node = my_solist.insert_dummy(parent, split_order_key_dummy(bucket));
        set_bucket(bucket, dummy_node);
    }
1577
    //! Double the bucket count when the observed load factor exceeds the
    //! limit. total_elements: element count after the latest insertion;
    //! current_size: bucket count the inserting thread observed.
    void adjust_table_size(size_type total_elements, size_type current_size)
    {
        // Grow the table by a factor of 2 if possible and needed
        if ( ((float) total_elements / (float) current_size) > my_maximum_bucket_size )
        {
            // Double the size of the hash only if size has not changed in
            // between: the CAS fails harmlessly if another thread already
            // doubled (or further grew) the bucket count.
            my_number_of_buckets.compare_and_swap(2u*current_size, current_size);
        }
    }
1589
1590 size_type get_parent(size_type bucket) const
1591 {
1592
1593 size_type msb = __TBB_Log2((uintptr_t)bucket);
1594 return bucket & ~(size_type(1) << msb);
1595 }
1596
1597
1598
1599
    // Bucket-segment layout: buckets live in segments of size 2, 2, 4, 8,
    // ... so the table can grow without relocating existing buckets.

    //! Segment that holds bucket `index`.
    static size_type segment_index_of( size_type index ) {
        // `|1` makes indices 0 and 1 both map to segment 0.
        return size_type( __TBB_Log2( uintptr_t(index|1) ) );
    }

    //! First bucket index stored in segment k (0, 2, 4, 8, ...).
    static size_type segment_base( size_type k ) {
        return (size_type(1)<<k & ~size_type(1));
    }

    //! Number of buckets in segment k; segment 0 holds two.
    static size_type segment_size( size_type k ) {
        return k? size_type(1)<<k : 2;
    }
1613
1614 raw_iterator get_bucket(size_type bucket) const {
1615 size_type segment = segment_index_of(bucket);
1616 bucket -= segment_base(segment);
1617 __TBB_ASSERT( my_buckets[segment], "bucket must be in an allocated segment" );
1618 return my_buckets[segment][bucket];
1619 }
1620
    //! Map a hash value to its bucket, lazily initializing the bucket on
    //! first use, and return the bucket's dummy-head iterator.
    raw_iterator prepare_bucket(sokey_t hash_key) {
        size_type bucket = hash_key % my_number_of_buckets;
        size_type segment = segment_index_of(bucket);
        size_type index = bucket - segment_base(segment);
        // Uninitialized if the segment is not yet allocated, or the slot
        // still holds the zero-filled (NULL-node) iterator.
        if (my_buckets[segment] == NULL || my_buckets[segment][index].get_node_ptr() == NULL)
            init_bucket(bucket);
        return my_buckets[segment][index];
    }
1629
    //! Publish `dummy_head` as the head of `bucket`, allocating the
    //! enclosing segment on demand. Segment allocation is race-safe: only
    //! one thread's CAS publishes a segment; losers free their copy.
    void set_bucket(size_type bucket, raw_iterator dummy_head) {
        size_type segment = segment_index_of(bucket);
        bucket -= segment_base(segment); // slot within the segment

        if (my_buckets[segment] == NULL) {
            size_type sz = segment_size(segment);
            raw_iterator * new_segment = my_allocator.allocate(sz);
            // Zero-fill so unused slots read as uninitialized (NULL node).
            std::memset(static_cast<void*>(new_segment), 0, sz*sizeof(raw_iterator));

            // If another thread beat us to publishing, discard our segment.
            if (my_buckets[segment].compare_and_swap( new_segment, NULL) != NULL)
                my_allocator.deallocate(new_segment, sz);
        }

        my_buckets[segment][bucket] = dummy_head;
    }
1645
1646 bool is_initialized(size_type bucket) const {
1647 size_type segment = segment_index_of(bucket);
1648 bucket -= segment_base(segment);
1649
1650 if (my_buckets[segment] == NULL)
1651 return false;
1652
1653 raw_iterator it = my_buckets[segment][bucket];
1654 return (it.get_node_ptr() != NULL);
1655 }
1656
1657
1658
1659
    // Split-order keys: hash bits are reversed; element keys get the low
    // bit set to 1, bucket dummy keys get it cleared to 0, so a bucket's
    // dummy always sorts immediately before that bucket's elements.

    //! Order key for a regular (element) node.
    sokey_t split_order_key_regular(sokey_t order_key) const {
        return __TBB_ReverseBits(order_key) | 0x1;
    }

    //! Order key for a dummy (bucket head) node.
    sokey_t split_order_key_dummy(sokey_t order_key) const {
        return __TBB_ReverseBits(order_key) & ~sokey_t(0x1);
    }
1668
1669
    // Current bucket count; only ever grows (CAS doubling, rehash()).
    atomic<size_type> my_number_of_buckets;
    // Split-ordered list holding all elements plus bucket dummy nodes.
    solist_t my_solist;
    // Allocator for the bucket-segment arrays of raw_iterator.
    typename tbb::internal::allocator_rebind<allocator_type, raw_iterator>::type my_allocator;
    // Load-factor limit that triggers bucket-count doubling.
    float my_maximum_bucket_size;
    // Lazily allocated segments of bucket head iterators (NULL until used).
    atomic<raw_iterator*> my_buckets[pointers_per_table];
1675 };
1676 #if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
1677 #pragma warning(pop)
1678 #endif
1679
1680 }
1681
1682 }
1683 }
1684 #endif