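//  lock-free queue from
//  Michael, Scott: "Simple, Fast, and Practical Non-Blocking and Blocking Concurrent Queue Algorithms"
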
#ifndef BOOST_LOCKFREE_FIFO_HPP_INCLUDED
#define BOOST_LOCKFREE_FIFO_HPP_INCLUDED

#include <boost/config.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
# pragma once
#endif

#include <cstddef>     // std::size_t
#include <type_traits> // std::is_trivially_assignable, std::is_trivially_destructible

#include <boost/assert.hpp>
#include <boost/core/allocator_access.hpp>
#include <boost/parameter/optional.hpp>
#include <boost/parameter/parameters.hpp>
#include <boost/static_assert.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/copy_payload.hpp>
#include <boost/lockfree/detail/freelist.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>
#include <boost/lockfree/detail/uses_optional.hpp>
#include <boost/lockfree/lockfree_forward.hpp>

#if defined( _MSC_VER )
# pragma warning( push )
# pragma warning( disable : 4324 ) // structure was padded due to alignment specifier
#endif

#if defined( BOOST_INTEL ) && ( BOOST_INTEL_CXX_VERSION > 1000 )
# pragma warning( push )
# pragma warning( disable : 488 ) // template parameter unused in declaring parameter types
#endif


namespace boost { namespace lockfree {

#ifndef BOOST_DOXYGEN_INVOKED
namespace detail {

typedef parameter::parameters< boost::parameter::optional< tag::allocator >, boost::parameter::optional< tag::capacity > >
    queue_signature;

} // namespace detail
#endif

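/** The queue class provides a multi-writer/multi-reader queue, pushing and popping is lock-free,
 *  construction/destruction has to be synchronized. It uses a freelist for memory management,
 *  freed nodes are pushed to the freelist and not returned to the OS before the queue is destroyed.
 *
 *  \b Policies:
 *  - \c boost::lockfree::fixed_sized<> \n
 *    If the data structure is configured as fixed-sized, the internal nodes are stored inside an array and addressed
 *    by array indexing. This limits the possible size of the queue, but on platforms that lack double-width
 *    compare-and-exchange instructions it is the best way to achieve lock-freedom.
 *
 *  - \c boost::lockfree::capacity<> \n
 *    If this template argument is passed to the options, the size of the queue is set at compile-time. \n
 *    This option implies \c fixed_sized<true>.
 *
 *  - \c boost::lockfree::allocator<> \n
 *    Specifies the allocator that is used for the internal freelist.
 *
 *  \b Requirements:
 *  - T must be copy-assignable
 *  - T must be trivially assignable
 *  - T must be trivially destructible
 *
 *  Usage sketch (illustrative only; \c process() is a placeholder for user code):
 *  \code
 *  boost::lockfree::queue< int > q( 128 ); // run-time sized queue, 128 nodes pre-allocated
 *  q.push( 42 );
 *  int value;
 *  while ( q.pop( value ) )
 *      process( value );
 *  \endcode
 * */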
template < typename T, typename... Options >
#if !defined( BOOST_NO_CXX20_HDR_CONCEPTS )
    requires( std::is_copy_assignable_v< T > && std::is_trivially_assignable_v< T&, T >
              && std::is_trivially_destructible_v< T > )
#endif
class queue
{
private:
#ifndef BOOST_DOXYGEN_INVOKED

    BOOST_STATIC_ASSERT( ( std::is_trivially_destructible< T >::value ) );
    BOOST_STATIC_ASSERT( ( std::is_trivially_assignable< T&, T >::value ) );

    typedef typename detail::queue_signature::bind< Options... >::type bound_args;

    static constexpr bool   has_capacity = detail::extract_capacity< bound_args >::has_capacity;
    static constexpr size_t capacity     = detail::extract_capacity< bound_args >::capacity + 1; // + 1 for the dummy node
    static constexpr bool   fixed_sized  = detail::extract_fixed_sized< bound_args >::value;
    static constexpr bool   node_based   = !( has_capacity || fixed_sized );
    static constexpr bool   compile_time_sized = has_capacity;

    struct alignas( detail::cacheline_bytes ) node
    {
        typedef typename detail::select_tagged_handle< node, node_based >::tagged_handle_type tagged_node_handle;
        typedef typename detail::select_tagged_handle< node, node_based >::handle_type handle_type;

        node( T const& v, handle_type null_handle ) :
            data( v )
        {
            /* keep the next tag intact to avoid the ABA problem */
            tagged_node_handle old_next = next.load( memory_order_relaxed );
            tagged_node_handle new_next( null_handle, old_next.get_next_tag() );
            next.store( new_next, memory_order_release );
        }

        node( handle_type null_handle ) :
            next( tagged_node_handle( null_handle, 0 ) )
        {}

        node( void )
        {}

        atomic< tagged_node_handle > next;
        T data;
    };

    typedef detail::extract_allocator_t< bound_args, node > node_allocator;
    typedef detail::select_freelist_t< node, node_allocator, compile_time_sized, fixed_sized, capacity > pool_t;
    typedef typename pool_t::tagged_node_handle tagged_node_handle;
    typedef typename detail::select_tagged_handle< node, node_based >::handle_type handle_type;

    void initialize( void )
    {
        // the queue always contains one dummy node; head_ and tail_ initially point to it
        node* n = pool.template construct< true, false >( pool.null_handle() );
        tagged_node_handle dummy_node( pool.get_handle( n ), 0 );
        head_.store( dummy_node, memory_order_relaxed );
        tail_.store( dummy_node, memory_order_release );
    }

    struct implementation_defined
    {
        typedef node_allocator allocator;
        typedef std::size_t    size_type;
    };

#endif

public:
    typedef T value_type;
    typedef typename implementation_defined::allocator allocator;
    typedef typename implementation_defined::size_type size_type;

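    /** \return true, if implementation is lock-free.
     *
     *  \warning It only checks whether the queue head, the queue tail and the freelist can be modified in a
     *           lock-free manner. On most platforms the whole implementation is lock-free if this is true. Using
     *           C++11-style atomics there is no way to test every internal node, as further nodes may be allocated
     *           from the operating system.
     * */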
    bool is_lock_free( void ) const
    {
        return head_.is_lock_free() && tail_.is_lock_free() && pool.is_lock_free();
    }

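    /** Construct a fixed-sized queue
     *
     *  \pre Must specify a capacity<> argument
     * */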
    queue( void )
#if !defined( BOOST_NO_CXX20_HDR_CONCEPTS )
        requires( has_capacity )
#endif
        :
        head_( tagged_node_handle( 0, 0 ) ),
        tail_( tagged_node_handle( 0, 0 ) ),
        pool( node_allocator(), capacity )
    {
        // without C++20 concepts this constructor exists even when no capacity<> is given,
        // so the precondition is checked at runtime
        BOOST_ASSERT( has_capacity );
        initialize();
    }

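    /** Construct a fixed-sized queue with a custom allocator
     *
     *  \pre Must specify a capacity<> argument
     * */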
    template < typename U, typename Enabler = std::enable_if< has_capacity > >
    explicit queue( typename boost::allocator_rebind< node_allocator, U >::type const& alloc ) :
        head_( tagged_node_handle( 0, 0 ) ),
        tail_( tagged_node_handle( 0, 0 ) ),
        pool( alloc, capacity )
    {
        initialize();
    }

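    /** Construct a fixed-sized queue with a custom allocator
     *
     *  \pre Must specify a capacity<> argument
     * */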
    template < typename Enabler = std::enable_if< has_capacity > >
    explicit queue( allocator const& alloc ) :
        head_( tagged_node_handle( 0, 0 ) ),
        tail_( tagged_node_handle( 0, 0 ) ),
        pool( alloc, capacity )
    {
        initialize();
    }

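    /** Construct a variable-sized queue
     *
     *  Allocate n nodes initially for the freelist
     *
     *  \pre Must \b not specify a capacity<> argument
     * */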
    template < typename Enabler = std::enable_if< !has_capacity > >
    explicit queue( size_type n ) :
        head_( tagged_node_handle( 0, 0 ) ),
        tail_( tagged_node_handle( 0, 0 ) ),
        pool( node_allocator(), n + 1 )
    {
        initialize();
    }

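    /** Construct a variable-sized queue with a custom allocator
     *
     *  Allocate n nodes initially for the freelist
     *
     *  \pre Must \b not specify a capacity<> argument
     * */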
    template < typename U, typename Enabler = std::enable_if< !has_capacity > >
    queue( size_type n, typename boost::allocator_rebind< node_allocator, U >::type const& alloc ) :
        head_( tagged_node_handle( 0, 0 ) ),
        tail_( tagged_node_handle( 0, 0 ) ),
        pool( alloc, n + 1 )
    {
        initialize();
    }

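    /** Construct a variable-sized queue with a custom allocator
     *
     *  Allocate n nodes initially for the freelist
     *
     *  \pre Must \b not specify a capacity<> argument
     * */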
    template < typename Enabler = std::enable_if< !has_capacity > >
    queue( size_type n, allocator const& alloc ) :
        head_( tagged_node_handle( 0, 0 ) ),
        tail_( tagged_node_handle( 0, 0 ) ),
        pool( alloc, n + 1 )
    {
        initialize();
    }

    queue( const queue& )            = delete;
    queue& operator=( const queue& ) = delete;
    queue( queue&& )                 = delete;
    queue& operator=( queue&& )      = delete;

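    /** Allocate n nodes for the freelist
     *
     *  \pre  only valid if no capacity<> argument was given
     *  \note thread-safe, may block if memory allocator blocks
     * */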
    void reserve( size_type n )
    {
        pool.template reserve< true >( n );
    }

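    /** Allocate n nodes for the freelist
     *
     *  \pre  only valid if no capacity<> argument was given
     *  \note not thread-safe, may block if memory allocator blocks
     * */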
    void reserve_unsafe( size_type n )
    {
        pool.template reserve< false >( n );
    }

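    /** Destroys queue, free all nodes from freelist.
     *
     *  \note not thread-safe
     * */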
    ~queue( void )
    {
        consume_all( []( const T& ) {} );

        pool.template destruct< false >( head_.load( memory_order_relaxed ) );
    }

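    /** Check if the queue is empty
     *
     *  \return true, if the queue is empty, false otherwise
     *  \note The result is only accurate if no other thread modifies the queue. Therefore it is rarely practical to
     *        use this value in program logic.
     * */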
    bool empty( void ) const
    {
        return pool.get_handle( head_.load() ) == pool.get_handle( tail_.load() );
    }

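    /** Pushes object t to the queue.
     *
     *  \post object will be pushed to the queue, if internal node can be allocated
     *  \returns true, if the push operation is successful.
     *
     *  \note Thread-safe. If the internal memory pool is exhausted and the memory pool is not fixed-sized, a new node
     *        will be allocated from the OS. This may not be lock-free.
     * */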
    bool push( const T& t )
    {
        return do_push< false >( t );
    }

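    /// \copydoc boost::lockfree::queue::push(const T& t)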
    bool push( T&& t )
    {
        return do_push< false >( std::forward< T >( t ) );
    }

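    /** Pushes object t to the queue.
     *
     *  \post object will be pushed to the queue, if internal node can be allocated
     *  \returns true, if the push operation is successful.
     *
     *  \note Thread-safe and non-blocking. If the internal memory pool is exhausted, the operation will fail.
     * */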
    bool bounded_push( const T& t )
    {
        return do_push< true >( t );
    }

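    /// \copydoc boost::lockfree::queue::bounded_push(const T& t)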
    bool bounded_push( T&& t )
    {
        return do_push< true >( std::forward< T >( t ) );
    }

private:
#ifndef BOOST_DOXYGEN_INVOKED
    template < bool Bounded >
    bool do_push( T&& t )
    {
        node* n = pool.template construct< true, Bounded >( std::forward< T >( t ), pool.null_handle() );
        return do_push_node( n );
    }

    template < bool Bounded >
    bool do_push( T const& t )
    {
        node* n = pool.template construct< true, Bounded >( t, pool.null_handle() );
        return do_push_node( n );
    }

    bool do_push_node( node* n )
    {
        handle_type node_handle = pool.get_handle( n );

        if ( n == NULL )
            return false;

        for ( ;; ) {
            tagged_node_handle tail = tail_.load( memory_order_acquire );
            node* tail_node = pool.get_pointer( tail );
            tagged_node_handle next = tail_node->next.load( memory_order_acquire );
            node* next_ptr = pool.get_pointer( next );

            tagged_node_handle tail2 = tail_.load( memory_order_acquire );
            if ( BOOST_LIKELY( tail == tail2 ) ) {
                if ( next_ptr == 0 ) {
                    // tail points to the last node: try to link the new node behind it
                    tagged_node_handle new_tail_next( node_handle, next.get_next_tag() );
                    if ( tail_node->next.compare_exchange_weak( next, new_tail_next ) ) {
                        // enqueue succeeded, try to swing tail to the inserted node
                        tagged_node_handle new_tail( node_handle, tail.get_next_tag() );
                        tail_.compare_exchange_strong( tail, new_tail );
                        return true;
                    }
                } else {
                    // tail is lagging behind: help to advance it
                    tagged_node_handle new_tail( pool.get_handle( next_ptr ), tail.get_next_tag() );
                    tail_.compare_exchange_strong( tail, new_tail );
                }
            }
        }
    }

#endif

public:
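    /** Pushes object t to the queue.
     *
     *  \post object will be pushed to the queue, if internal node can be allocated
     *  \returns true, if the push operation is successful.
     *
     *  \note Not thread-safe. If the internal memory pool is exhausted and the memory pool is not fixed-sized, a new
     *        node will be allocated from the OS. This may not be lock-free.
     * */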
    bool unsynchronized_push( T&& t )
    {
        node* n = pool.template construct< false, false >( std::forward< T >( t ), pool.null_handle() );

        if ( n == NULL )
            return false;

        for ( ;; ) {
            tagged_node_handle tail = tail_.load( memory_order_relaxed );
            node* tail_node = pool.get_pointer( tail );
            tagged_node_handle next = tail_node->next.load( memory_order_relaxed );
            node* next_ptr = pool.get_pointer( next );

            if ( next_ptr == 0 ) {
                tail_node->next.store( tagged_node_handle( pool.get_handle( n ), next.get_next_tag() ), memory_order_relaxed );
                tail_.store( tagged_node_handle( pool.get_handle( n ), tail.get_next_tag() ), memory_order_relaxed );
                return true;
            } else
                tail_.store( tagged_node_handle( pool.get_handle( next_ptr ), tail.get_next_tag() ), memory_order_relaxed );
        }
    }

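    /** Pops object from queue.
     *
     *  \post if pop operation is successful, object will be copied to ret.
     *  \returns true, if the pop operation is successful, false if queue was empty.
     *
     *  \note Thread-safe and non-blocking
     * */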
    bool pop( T& ret )
    {
        return pop< T >( ret );
    }

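    /** Pops object from queue.
     *
     *  \pre type U must be constructible from T and copyable, or T must be convertible to U
     *  \post if pop operation is successful, object will be copied to ret.
     *  \returns true, if the pop operation is successful, false if queue was empty.
     *
     *  \note Thread-safe and non-blocking
     * */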
    template < typename U >
    bool pop( U& ret )
    {
        for ( ;; ) {
            tagged_node_handle head = head_.load( memory_order_acquire );
            node* head_ptr = pool.get_pointer( head );

            tagged_node_handle tail = tail_.load( memory_order_acquire );
            tagged_node_handle next = head_ptr->next.load( memory_order_acquire );
            node* next_ptr = pool.get_pointer( next );

            tagged_node_handle head2 = head_.load( memory_order_acquire );
            if ( BOOST_LIKELY( head == head2 ) ) {
                if ( pool.get_handle( head ) == pool.get_handle( tail ) ) {
                    if ( next_ptr == 0 )
                        return false;

                    // tail is lagging behind: help to advance it before retrying
                    tagged_node_handle new_tail( pool.get_handle( next ), tail.get_next_tag() );
                    tail_.compare_exchange_strong( tail, new_tail );

                } else {
                    if ( next_ptr == 0 )
                        /* this check is not part of the original algorithm as published by michael and scott
                         *
                         * however we reuse the tagged_ptr part for the freelist and clear the next part during node
                         * allocation. we can observe a null-pointer here.
                         * */
                        continue;
                    detail::copy_payload( next_ptr->data, ret );

                    tagged_node_handle new_head( pool.get_handle( next ), head.get_next_tag() );
                    if ( head_.compare_exchange_weak( head, new_head ) ) {
                        pool.template destruct< true >( head );
                        return true;
                    }
                }
            }
        }
    }

#if !defined( BOOST_NO_CXX17_HDR_OPTIONAL ) || defined( BOOST_DOXYGEN_INVOKED )
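    /** Pops object from queue, returning a std::optional<>
     *
     *  \returns object popped from the queue, std::nullopt if queue was empty
     *
     *  \note Thread-safe and non-blocking
     * */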
    std::optional< T > pop( uses_optional_t )
    {
        T to_dequeue;
        if ( pop( to_dequeue ) )
            return to_dequeue;
        else
            return std::nullopt;
    }

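    /** Pops object from queue, returning a std::optional<>
     *
     *  \pre type U must be constructible from T and copyable, or T must be convertible to U
     *  \returns object popped from the queue, std::nullopt if queue was empty
     *
     *  \note Thread-safe and non-blocking
     * */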
    template < typename U >
    std::optional< U > pop( uses_optional_t )
    {
        U to_dequeue;
        if ( pop( to_dequeue ) )
            return to_dequeue;
        else
            return std::nullopt;
    }
#endif

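    /** Pops object from queue.
     *
     *  \post if pop operation is successful, object will be copied to ret.
     *  \returns true, if the pop operation is successful, false if queue was empty.
     *
     *  \note Not thread-safe, but non-blocking
     * */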
    bool unsynchronized_pop( T& ret )
    {
        return unsynchronized_pop< T >( ret );
    }

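    /** Pops object from queue.
     *
     *  \pre type U must be constructible from T and copyable, or T must be convertible to U
     *  \post if pop operation is successful, object will be copied to ret.
     *  \returns true, if the pop operation is successful, false if queue was empty.
     *
     *  \note Not thread-safe, but non-blocking
     * */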
    template < typename U >
    bool unsynchronized_pop( U& ret )
    {
        for ( ;; ) {
            tagged_node_handle head = head_.load( memory_order_relaxed );
            node* head_ptr = pool.get_pointer( head );
            tagged_node_handle tail = tail_.load( memory_order_relaxed );
            tagged_node_handle next = head_ptr->next.load( memory_order_relaxed );
            node* next_ptr = pool.get_pointer( next );

            if ( pool.get_handle( head ) == pool.get_handle( tail ) ) {
                if ( next_ptr == 0 )
                    return false;

                // tail is lagging behind: advance it before retrying
                tagged_node_handle new_tail( pool.get_handle( next ), tail.get_next_tag() );
                tail_.store( new_tail );
            } else {
                if ( next_ptr == 0 )
                    /* this check is not part of the original algorithm as published by michael and scott
                     *
                     * however we reuse the tagged_ptr part for the freelist and clear the next part during node
                     * allocation. we can observe a null-pointer here.
                     * */
                    continue;
                detail::copy_payload( next_ptr->data, ret );
                tagged_node_handle new_head( pool.get_handle( next ), head.get_next_tag() );
                head_.store( new_head );
                pool.template destruct< false >( head );
                return true;
            }
        }
    }

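    /** consumes one element via a functor
     *
     *  pops one element from the queue and applies the functor on this object
     *
     *  \returns true, if one element was consumed
     *
     *  \note Thread-safe and non-blocking, if functor is thread-safe and non-blocking
     * */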
    template < typename Functor >
    bool consume_one( Functor&& f )
    {
        T element;
        bool success = pop( element );
        if ( success )
            f( std::move( element ) );

        return success;
    }

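    /** consumes all elements via a functor
     *
     *  sequentially pops all elements from the queue and applies the functor on each object
     *
     *  \returns number of elements that are consumed
     *
     *  \note Thread-safe and non-blocking, if functor is thread-safe and non-blocking
     * */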
    template < typename Functor >
    size_t consume_all( Functor&& f )
    {
        size_t element_count = 0;
        while ( consume_one( f ) )
            element_count += 1;

        return element_count;
    }

private:
#ifndef BOOST_DOXYGEN_INVOKED
    atomic< tagged_node_handle > head_;
    // pad head_ and tail_ onto separate cache lines to avoid false sharing between producers and consumers
    static constexpr int padding_size = detail::cacheline_bytes - sizeof( tagged_node_handle );
    char padding1[ padding_size ];
    atomic< tagged_node_handle > tail_;
    char padding2[ padding_size ];

    pool_t pool;
#endif
};

}} // namespace boost::lockfree

#if defined( BOOST_INTEL ) && ( BOOST_INTEL_CXX_VERSION > 1000 )
# pragma warning( pop )
#endif

#if defined( _MSC_VER )
# pragma warning( pop )
#endif

#endif /* BOOST_LOCKFREE_FIFO_HPP_INCLUDED */