/*
    NOTE(review): the original copyright/license banner of this header was
    destroyed during text extraction (only bare line numbers remained here).
    Restore the project's standard license header before shipping this file.
*/
0017 #ifndef __TBB_queuing_mutex_H
0018 #define __TBB_queuing_mutex_H
0019
0020 #define __TBB_queuing_mutex_H_include_area
0021 #include "internal/_warning_suppress_enable_notice.h"
0022
0023 #include <cstring>
0024 #include "atomic.h"
0025 #include "tbb_profiling.h"
0026
0027 namespace tbb {
0028
0029
0030
//! Queuing mutex with FIFO (fair) ordering.
/** Waiting threads are linked into a queue of scoped_lock nodes via q_tail.
    The blocking protocol itself is implemented out-of-line in the exported
    methods (acquire/try_acquire/release) compiled into the TBB library, so it
    is not visible in this header.
    @ingroup synchronization */
class queuing_mutex : internal::mutex_copy_deprecated_and_disabled {
public:
    //! Construct an unacquired mutex.
    queuing_mutex() {
        q_tail = NULL;  // empty queue: no owner, no waiters
#if TBB_USE_THREADING_TOOLS
        // Register the mutex with threading analysis tools (ITT notification).
        internal_construct();
#endif
    }

    //! The scoped locking pattern.
    /** RAII lock holder: releases the mutex in its destructor, avoiding the
        common problem of forgetting to release a lock. The object also serves
        as the per-thread "node" in the mutex's waiter queue. */
    class scoped_lock: internal::no_copy {
        //! Reset fields to mean "no lock held".
        void initialize() {
            mutex = NULL;
            going = 0;
#if TBB_USE_ASSERT
            // Poison `next` so debug builds catch use of an unlinked node.
            internal::poison_pointer(next);
#endif
        }

    public:
        //! Construct a lock that has not yet acquired any mutex.
        scoped_lock() {initialize();}

        //! Construct and acquire a lock on the given mutex (may block).
        scoped_lock( queuing_mutex& m ) {
            initialize();
            acquire(m);
        }

        //! Release the lock, if one is currently held.
        ~scoped_lock() {
            if( mutex ) release();
        }

        //! Acquire a lock on the given mutex; blocks until the lock is obtained.
        void __TBB_EXPORTED_METHOD acquire( queuing_mutex& m );

        //! Try to acquire a lock on the given mutex without blocking.
        /** Returns true if the lock was obtained, false otherwise. */
        bool __TBB_EXPORTED_METHOD try_acquire( queuing_mutex& m );

        //! Release the held lock.
        void __TBB_EXPORTED_METHOD release();

    private:
        //! The mutex currently owned, or NULL if no mutex is held.
        queuing_mutex* mutex;

        //! Next competitor queued behind this one for the same mutex.
        /** Presumably valid only while enqueued; poisoned in debug builds
            otherwise — protocol lives in the exported methods. */
        scoped_lock *next;

        //! Local spin-wait flag for this waiter.
        /** Set to 0 here ("not holding the lock"); the exact handshake with
            the predecessor in the queue is implemented in the TBB library
            (see acquire/release). */
        uintptr_t going;
    };

    //! Register this mutex with threading analysis tools.
    void __TBB_EXPORTED_METHOD internal_construct();

    // Mutex traits (see the TBB Mutex Concept).
    static const bool is_rw_mutex = false;
    static const bool is_recursive_mutex = false;
    static const bool is_fair_mutex = true;

private:
    //! Tail of the waiter queue: the most recent competitor for the lock.
    /** NULL when the mutex is unowned and no thread is waiting. */
    atomic<scoped_lock*> q_tail;

};
0105
0106 __TBB_DEFINE_PROFILING_SET_NAME(queuing_mutex)
0107
0108 }
0109
0110 #include "internal/_warning_suppress_disable_notice.h"
0111 #undef __TBB_queuing_mutex_H_include_area
0112
0113 #endif