#ifndef BOOST_LOCKFREE_STACK_HPP_INCLUDED
#define BOOST_LOCKFREE_STACK_HPP_INCLUDED

#include <boost/assert.hpp>
#include <boost/checked_delete.hpp>
#include <boost/core/allocator_access.hpp>
#include <boost/core/no_exceptions_support.hpp>
#include <boost/integer_traits.hpp>
#include <boost/static_assert.hpp>
#include <boost/tuple/tuple.hpp>
#include <boost/type_traits/is_copy_constructible.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/copy_payload.hpp>
#include <boost/lockfree/detail/freelist.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>

#include <boost/lockfree/lockfree_forward.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace lockfree {
namespace detail {

typedef parameter::parameters<boost::parameter::optional<tag::allocator>,
                              boost::parameter::optional<tag::capacity>
                             > stack_signature;

}

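/** The stack class provides a multi-writer/multi-reader stack: push and pop are lock-free as
 *  long as no new memory has to be allocated. Nodes are managed by an internal freelist, so
 *  memory released by pop is recycled by later pushes rather than returned to the OS before
 *  the stack is destroyed. Construction and destruction are not thread-safe.
 *
 *  \b Policies:
 *  - \c boost::lockfree::capacity<>: selects a compile-time sized, fixed-capacity freelist
 *  - \c boost::lockfree::fixed_sized<>: bounds the freelist to a size given at construction time
 *  - \c boost::lockfree::allocator<>: allocator used by the internal freelist
 *
 *  A minimal usage sketch (element type and size chosen purely for illustration):
 *  \code
 *  boost::lockfree::stack<int> stk(128);  // reserve room for 128 nodes
 *  stk.push(42);
 *  int out;
 *  bool popped = stk.pop(out);            // popped == true, out == 42
 *  \endcode
 */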
#ifdef BOOST_NO_CXX11_VARIADIC_TEMPLATES
template <typename T, class A0, class A1, class A2>
#else
template <typename T, typename ...Options>
#endif
class stack
{
private:
#ifndef BOOST_DOXYGEN_INVOKED
    BOOST_STATIC_ASSERT(boost::is_copy_constructible<T>::value);

#ifdef BOOST_NO_CXX11_VARIADIC_TEMPLATES
    typedef typename detail::stack_signature::bind<A0, A1, A2>::type bound_args;
#else
    typedef typename detail::stack_signature::bind<Options...>::type bound_args;
#endif

    static const bool has_capacity = detail::extract_capacity<bound_args>::has_capacity;
    static const size_t capacity = detail::extract_capacity<bound_args>::capacity;
    static const bool fixed_sized = detail::extract_fixed_sized<bound_args>::value;
    static const bool node_based = !(has_capacity || fixed_sized);
    static const bool compile_time_sized = has_capacity;

    struct node
    {
        node(T const & val):
            v(val)
        {}

        typedef typename detail::select_tagged_handle<node, node_based>::handle_type handle_t;
        handle_t next;
        const T v;
    };

    typedef typename detail::extract_allocator<bound_args, node>::type node_allocator;
    typedef typename detail::select_freelist<node, node_allocator, compile_time_sized, fixed_sized, capacity>::type pool_t;
    typedef typename pool_t::tagged_node_handle tagged_node_handle;

    BOOST_STATIC_ASSERT((mpl::if_c<has_capacity,
                                   mpl::bool_<capacity - 1 < boost::integer_traits<boost::uint16_t>::const_max>,
                                   mpl::true_
                                  >::type::value));

    struct implementation_defined
    {
        typedef node_allocator allocator;
        typedef std::size_t size_type;
    };

#endif

    BOOST_DELETED_FUNCTION(stack(stack const&))
    BOOST_DELETED_FUNCTION(stack& operator= (stack const&))

public:
    typedef T value_type;
    typedef typename implementation_defined::allocator allocator;
    typedef typename implementation_defined::size_type size_type;

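    /** \return true, if implementation is lock-free.
     *
     *  \warning This only checks whether the top-of-stack handle and the freelist can be
     *           modified in a lock-free manner; code paths that allocate memory from the OS
     *           are not covered.
     */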
    bool is_lock_free (void) const
    {
        return tos.is_lock_free() && pool.is_lock_free();
    }

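    /** Construct a fixed-sized stack.
     *
     *  \pre Must specify a capacity<> argument.
     */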
    stack(void):
        pool(node_allocator(), capacity)
    {
        BOOST_ASSERT(has_capacity);
        initialize();
    }

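    /** Construct a fixed-sized stack with a custom allocator.
     *
     *  \pre Must specify a capacity<> argument.
     */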
    template <typename U>
    explicit stack(typename boost::allocator_rebind<node_allocator, U>::type const & alloc):
        pool(alloc, capacity)
    {
        BOOST_STATIC_ASSERT(has_capacity);
        initialize();
    }

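    /** Construct a fixed-sized stack with a custom allocator.
     *
     *  \pre Must specify a capacity<> argument.
     */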
    explicit stack(allocator const & alloc):
        pool(alloc, capacity)
    {
        BOOST_ASSERT(has_capacity);
        initialize();
    }

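    /** Construct a variable-sized stack.
     *
     *  Allocates n nodes for the internal freelist up front.
     *
     *  \pre Must \b not specify a capacity<> argument.
     */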
    explicit stack(size_type n):
        pool(node_allocator(), n)
    {
        BOOST_ASSERT(!has_capacity);
        initialize();
    }

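    /** Construct a variable-sized stack with a custom allocator.
     *
     *  Allocates n nodes for the internal freelist up front.
     *
     *  \pre Must \b not specify a capacity<> argument.
     */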
    template <typename U>
    stack(size_type n, typename boost::allocator_rebind<node_allocator, U>::type const & alloc):
        pool(alloc, n)
    {
        BOOST_STATIC_ASSERT(!has_capacity);
        initialize();
    }

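    /** Allocate n additional nodes for the internal freelist.
     *
     *  \pre  Only valid if no capacity<> argument was given.
     *  \note Thread-safe.
     */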
    void reserve(size_type n)
    {
        BOOST_ASSERT(!has_capacity);
        pool.template reserve<true>(n);
    }

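    /** Allocate n additional nodes for the internal freelist.
     *
     *  \pre  Only valid if no capacity<> argument was given.
     *  \note Not thread-safe.
     */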
    void reserve_unsafe(size_type n)
    {
        BOOST_ASSERT(!has_capacity);
        pool.template reserve<false>(n);
    }

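    /** Destroys the stack, consuming and freeing all remaining elements.
     *
     *  \note Not thread-safe.
     */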
    ~stack(void)
    {
        detail::consume_noop consume_functor;
        (void)consume_all(consume_functor);
    }

private:
#ifndef BOOST_DOXYGEN_INVOKED
    void initialize(void)
    {
        tos.store(tagged_node_handle(pool.null_handle(), 0));
    }

    void link_nodes_atomic(node * new_top_node, node * end_node)
    {
        tagged_node_handle old_tos = tos.load(detail::memory_order_relaxed);
        for (;;) {
            tagged_node_handle new_tos (pool.get_handle(new_top_node), old_tos.get_tag());
            end_node->next = pool.get_handle(old_tos);

            if (tos.compare_exchange_weak(old_tos, new_tos))
                break;
        }
    }

    void link_nodes_unsafe(node * new_top_node, node * end_node)
    {
        tagged_node_handle old_tos = tos.load(detail::memory_order_relaxed);

        tagged_node_handle new_tos (pool.get_handle(new_top_node), old_tos.get_tag());
        end_node->next = pool.get_handle(old_tos);

        tos.store(new_tos, memory_order_relaxed);
    }

    template <bool Threadsafe, bool Bounded, typename ConstIterator>
    tuple<node*, node*> prepare_node_list(ConstIterator begin, ConstIterator end, ConstIterator & ret)
    {
        ConstIterator it = begin;
        node * end_node = pool.template construct<Threadsafe, Bounded>(*it++);
        if (end_node == NULL) {
            ret = begin;
            return make_tuple<node*, node*>(NULL, NULL);
        }

        node * new_top_node = end_node;
        end_node->next = NULL;

        BOOST_TRY {
            for (; it != end; ++it) {
                node * newnode = pool.template construct<Threadsafe, Bounded>(*it);
                if (newnode == NULL)
                    break;
                newnode->next = new_top_node;
                new_top_node = newnode;
            }
        } BOOST_CATCH (...) {
            for (node * current_node = new_top_node; current_node != NULL;) {
                node * next = current_node->next;
                pool.template destruct<Threadsafe>(current_node);
                current_node = next;
            }
            BOOST_RETHROW;
        }
        BOOST_CATCH_END

        ret = it;
        return make_tuple(new_top_node, end_node);
    }
#endif

public:

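    /** Pushes object v to the stack.
     *
     *  \post object will be pushed to the stack, if an internal node can be acquired
     *  \returns true, if the push operation is successful
     *
     *  \note Thread-safe. If the internal freelist is exhausted and not fixed-sized, a new
     *        node may be allocated, which is not necessarily lock-free.
     */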
    bool push(T const & v)
    {
        return do_push<false>(v);
    }

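    /** Pushes object v to the stack.
     *
     *  \post object will be pushed to the stack, if an internal node can be acquired
     *  \returns true, if the push operation is successful; false if the freelist is exhausted
     *
     *  \note Thread-safe and non-blocking; never allocates additional nodes.
     */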
    bool bounded_push(T const & v)
    {
        return do_push<true>(v);
    }

#ifndef BOOST_DOXYGEN_INVOKED
private:
    template <bool Bounded>
    bool do_push(T const & v)
    {
        node * newnode = pool.template construct<true, Bounded>(v);
        if (newnode == 0)
            return false;

        link_nodes_atomic(newnode, newnode);
        return true;
    }

    template <bool Bounded, typename ConstIterator>
    ConstIterator do_push(ConstIterator begin, ConstIterator end)
    {
        node * new_top_node;
        node * end_node;
        ConstIterator ret;

        tie(new_top_node, end_node) = prepare_node_list<true, Bounded>(begin, end, ret);
        if (new_top_node)
            link_nodes_atomic(new_top_node, end_node);

        return ret;
    }

public:
#endif

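    /** Pushes as many objects from the range [begin, end) as internal nodes can be acquired for.
     *
     *  \return iterator to the first element that was not pushed
     *
     *  \note Thread-safe. If the internal freelist is exhausted and not fixed-sized, new nodes
     *        may be allocated, which is not necessarily lock-free.
     */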
    template <typename ConstIterator>
    ConstIterator push(ConstIterator begin, ConstIterator end)
    {
        return do_push<false, ConstIterator>(begin, end);
    }

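    /** Pushes as many objects from the range [begin, end) as fit into the existing freelist.
     *
     *  \return iterator to the first element that was not pushed
     *
     *  \note Thread-safe and non-blocking; never allocates additional nodes.
     */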
    template <typename ConstIterator>
    ConstIterator bounded_push(ConstIterator begin, ConstIterator end)
    {
        return do_push<true, ConstIterator>(begin, end);
    }

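    /** Pushes object v to the stack.
     *
     *  \post object will be pushed to the stack, if an internal node can be acquired
     *  \returns true, if the push operation is successful
     *
     *  \note Not thread-safe.
     */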
    bool unsynchronized_push(T const & v)
    {
        node * newnode = pool.template construct<false, false>(v);
        if (newnode == 0)
            return false;

        link_nodes_unsafe(newnode, newnode);
        return true;
    }

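    /** Pushes as many objects from the range [begin, end) as internal nodes can be acquired for.
     *
     *  \return iterator to the first element that was not pushed
     *
     *  \note Not thread-safe.
     */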
    template <typename ConstIterator>
    ConstIterator unsynchronized_push(ConstIterator begin, ConstIterator end)
    {
        node * new_top_node;
        node * end_node;
        ConstIterator ret;

        tie(new_top_node, end_node) = prepare_node_list<false, false>(begin, end, ret);
        if (new_top_node)
            link_nodes_unsafe(new_top_node, end_node);

        return ret;
    }

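    /** Pops object from the stack.
     *
     *  \post if the pop operation is successful, the object is copied to ret
     *  \returns true, if the pop operation is successful; false if the stack was empty
     *
     *  \note Thread-safe and non-blocking.
     */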
    bool pop(T & ret)
    {
        return pop<T>(ret);
    }

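    /** Pops object from the stack, converting it to U.
     *
     *  \pre  T must be convertible to U
     *  \post if the pop operation is successful, the converted object is copied to ret
     *  \returns true, if the pop operation is successful; false if the stack was empty
     *
     *  \note Thread-safe and non-blocking.
     */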
    template <typename U>
    bool pop(U & ret)
    {
        BOOST_STATIC_ASSERT((boost::is_convertible<T, U>::value));
        detail::consume_via_copy<U> consumer(ret);

        return consume_one(consumer);
    }

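    /** Pops object from the stack.
     *
     *  \post if the pop operation is successful, the object is copied to ret
     *  \returns true, if the pop operation is successful; false if the stack was empty
     *
     *  \note Not thread-safe.
     */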
    bool unsynchronized_pop(T & ret)
    {
        return unsynchronized_pop<T>(ret);
    }

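    /** Pops object from the stack, converting it to U.
     *
     *  \pre  T must be convertible to U
     *  \post if the pop operation is successful, the converted object is copied to ret
     *  \returns true, if the pop operation is successful; false if the stack was empty
     *
     *  \note Not thread-safe.
     */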
    template <typename U>
    bool unsynchronized_pop(U & ret)
    {
        BOOST_STATIC_ASSERT((boost::is_convertible<T, U>::value));
        tagged_node_handle old_tos = tos.load(detail::memory_order_relaxed);
        node * old_tos_pointer = pool.get_pointer(old_tos);

        if (!pool.get_pointer(old_tos))
            return false;

        node * new_tos_ptr = pool.get_pointer(old_tos_pointer->next);
        tagged_node_handle new_tos(pool.get_handle(new_tos_ptr), old_tos.get_next_tag());

        tos.store(new_tos, memory_order_relaxed);
        detail::copy_payload(old_tos_pointer->v, ret);
        pool.template destruct<false>(old_tos);
        return true;
    }

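    /** Consumes one element via a functor: pops one element from the stack and invokes f with a
     *  const reference to it.
     *
     *  \returns true, if one element was consumed; false if the stack was empty
     *
     *  \note Thread-safe and non-blocking.
     */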
    template <typename Functor>
    bool consume_one(Functor & f)
    {
        tagged_node_handle old_tos = tos.load(detail::memory_order_consume);

        for (;;) {
            node * old_tos_pointer = pool.get_pointer(old_tos);
            if (!old_tos_pointer)
                return false;

            tagged_node_handle new_tos(old_tos_pointer->next, old_tos.get_next_tag());

            if (tos.compare_exchange_weak(old_tos, new_tos)) {
                f(old_tos_pointer->v);
                pool.template destruct<true>(old_tos);
                return true;
            }
        }
    }

    template <typename Functor>
    bool consume_one(Functor const & f)
    {
        tagged_node_handle old_tos = tos.load(detail::memory_order_consume);

        for (;;) {
            node * old_tos_pointer = pool.get_pointer(old_tos);
            if (!old_tos_pointer)
                return false;

            tagged_node_handle new_tos(old_tos_pointer->next, old_tos.get_next_tag());

            if (tos.compare_exchange_weak(old_tos, new_tos)) {
                f(old_tos_pointer->v);
                pool.template destruct<true>(old_tos);
                return true;
            }
        }
    }

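    /** Consumes all elements via a functor: repeatedly pops elements from the stack and invokes
     *  f on each of them.
     *
     *  \returns number of elements that were consumed
     *
     *  \note Thread-safe and non-blocking.
     */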
    template <typename Functor>
    size_t consume_all(Functor & f)
    {
        size_t element_count = 0;
        while (consume_one(f))
            element_count += 1;

        return element_count;
    }

    template <typename Functor>
    size_t consume_all(Functor const & f)
    {
        size_t element_count = 0;
        while (consume_one(f))
            element_count += 1;

        return element_count;
    }

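    /** Consumes all elements via a functor: atomically detaches the complete list of elements
     *  from the stack with a single compare-and-swap, then invokes f on each of them. Elements
     *  are consumed in LIFO order (top of stack first).
     *
     *  \returns number of elements that were consumed
     *
     *  \note Thread-safe and non-blocking.
     */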
    template <typename Functor>
    size_t consume_all_atomic(Functor & f)
    {
        size_t element_count = 0;
        tagged_node_handle old_tos = tos.load(detail::memory_order_consume);

        for (;;) {
            node * old_tos_pointer = pool.get_pointer(old_tos);
            if (!old_tos_pointer)
                return 0;

            tagged_node_handle new_tos(pool.null_handle(), old_tos.get_next_tag());

            if (tos.compare_exchange_weak(old_tos, new_tos))
                break;
        }

        tagged_node_handle nodes_to_consume = old_tos;

        for(;;) {
            node * node_pointer = pool.get_pointer(nodes_to_consume);
            f(node_pointer->v);
            element_count += 1;

            node * next_node = pool.get_pointer(node_pointer->next);

            if (!next_node) {
                pool.template destruct<true>(nodes_to_consume);
                break;
            }

            tagged_node_handle next(pool.get_handle(next_node), nodes_to_consume.get_next_tag());
            pool.template destruct<true>(nodes_to_consume);
            nodes_to_consume = next;
        }

        return element_count;
    }

    template <typename Functor>
    size_t consume_all_atomic(Functor const & f)
    {
        size_t element_count = 0;
        tagged_node_handle old_tos = tos.load(detail::memory_order_consume);

        for (;;) {
            node * old_tos_pointer = pool.get_pointer(old_tos);
            if (!old_tos_pointer)
                return 0;

            tagged_node_handle new_tos(pool.null_handle(), old_tos.get_next_tag());

            if (tos.compare_exchange_weak(old_tos, new_tos))
                break;
        }

        tagged_node_handle nodes_to_consume = old_tos;

        for(;;) {
            node * node_pointer = pool.get_pointer(nodes_to_consume);
            f(node_pointer->v);
            element_count += 1;

            node * next_node = pool.get_pointer(node_pointer->next);

            if (!next_node) {
                pool.template destruct<true>(nodes_to_consume);
                break;
            }

            tagged_node_handle next(pool.get_handle(next_node), nodes_to_consume.get_next_tag());
            pool.template destruct<true>(nodes_to_consume);
            nodes_to_consume = next;
        }

        return element_count;
    }

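    /** Consumes all elements via a functor: atomically detaches the complete list of elements
     *  from the stack, reverses it, then invokes f on each element. Elements are therefore
     *  consumed in FIFO order (oldest element first).
     *
     *  \returns number of elements that were consumed
     *
     *  \note Thread-safe and non-blocking.
     */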
    template <typename Functor>
    size_t consume_all_atomic_reversed(Functor & f)
    {
        size_t element_count = 0;
        tagged_node_handle old_tos = tos.load(detail::memory_order_consume);

        for (;;) {
            node * old_tos_pointer = pool.get_pointer(old_tos);
            if (!old_tos_pointer)
                return 0;

            tagged_node_handle new_tos(pool.null_handle(), old_tos.get_next_tag());

            if (tos.compare_exchange_weak(old_tos, new_tos))
                break;
        }

        tagged_node_handle nodes_to_consume = old_tos;

        node * last_node_pointer = NULL;
        tagged_node_handle nodes_in_reversed_order;
        for(;;) {
            node * node_pointer = pool.get_pointer(nodes_to_consume);
            node * next_node = pool.get_pointer(node_pointer->next);

            node_pointer->next = pool.get_handle(last_node_pointer);
            last_node_pointer = node_pointer;

            if (!next_node) {
                nodes_in_reversed_order = nodes_to_consume;
                break;
            }

            tagged_node_handle next(pool.get_handle(next_node), nodes_to_consume.get_next_tag());
            nodes_to_consume = next;
        }

        for(;;) {
            node * node_pointer = pool.get_pointer(nodes_in_reversed_order);
            f(node_pointer->v);
            element_count += 1;

            node * next_node = pool.get_pointer(node_pointer->next);

            if (!next_node) {
                pool.template destruct<true>(nodes_in_reversed_order);
                break;
            }

            tagged_node_handle next(pool.get_handle(next_node), nodes_in_reversed_order.get_next_tag());
            pool.template destruct<true>(nodes_in_reversed_order);
            nodes_in_reversed_order = next;
        }

        return element_count;
    }

    template <typename Functor>
    size_t consume_all_atomic_reversed(Functor const & f)
    {
        size_t element_count = 0;
        tagged_node_handle old_tos = tos.load(detail::memory_order_consume);

        for (;;) {
            node * old_tos_pointer = pool.get_pointer(old_tos);
            if (!old_tos_pointer)
                return 0;

            tagged_node_handle new_tos(pool.null_handle(), old_tos.get_next_tag());

            if (tos.compare_exchange_weak(old_tos, new_tos))
                break;
        }

        tagged_node_handle nodes_to_consume = old_tos;

        node * last_node_pointer = NULL;
        tagged_node_handle nodes_in_reversed_order;
        for(;;) {
            node * node_pointer = pool.get_pointer(nodes_to_consume);
            node * next_node = pool.get_pointer(node_pointer->next);

            node_pointer->next = pool.get_handle(last_node_pointer);
            last_node_pointer = node_pointer;

            if (!next_node) {
                nodes_in_reversed_order = nodes_to_consume;
                break;
            }

            tagged_node_handle next(pool.get_handle(next_node), nodes_to_consume.get_next_tag());
            nodes_to_consume = next;
        }

        for(;;) {
            node * node_pointer = pool.get_pointer(nodes_in_reversed_order);
            f(node_pointer->v);
            element_count += 1;

            node * next_node = pool.get_pointer(node_pointer->next);

            if (!next_node) {
                pool.template destruct<true>(nodes_in_reversed_order);
                break;
            }

            tagged_node_handle next(pool.get_handle(next_node), nodes_in_reversed_order.get_next_tag());
            pool.template destruct<true>(nodes_in_reversed_order);
            nodes_in_reversed_order = next;
        }

        return element_count;
    }

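    /** \return true, if the stack is empty.
     *
     *  \note The result is only a snapshot: other threads may push or pop concurrently, so the
     *        value is rarely useful for program logic.
     */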
    bool empty(void) const
    {
        return pool.get_pointer(tos.load()) == NULL;
    }

private:
#ifndef BOOST_DOXYGEN_INVOKED
    detail::atomic<tagged_node_handle> tos;

    static const int padding_size = BOOST_LOCKFREE_CACHELINE_BYTES - sizeof(tagged_node_handle);
    char padding[padding_size];

    pool_t pool;
#endif
};

} /* namespace lockfree */
} /* namespace boost */

#endif /* BOOST_LOCKFREE_STACK_HPP_INCLUDED */