#ifndef __TBB_mutex_padding_H
#define __TBB_mutex_padding_H

// Wrapper that pads a mutex so it sits alone on a cache line, without requiring
// that it be allocated from a pool. Because a padded mutex may be defined
// anywhere, it must span two cache lines to guarantee the required alignment.

namespace tbb {
namespace interface7 {
namespace internal {

// Assumed cache line size in bytes; all padding below is computed from it.
static const size_t cache_line_size = 64;

// Pad a mutex so that it fills whole cache lines and cannot share a line with
// neighbouring data (no false sharing); space overhead is up to 2*cache_line_size-1 bytes.
template<typename Mutex, bool is_rw> class padded_mutex;
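
// Worked example of the padding arithmetic used in the specializations below
// (illustrative only; assumes sizeof(long) == 8): for a 1-byte Mutex, my_pad
// holds ((1+63)/64+1)*64/8 = 16 longs, i.e. 128 bytes = two full cache lines,
// which always leaves room for a cache-line-aligned copy of the mutex.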

template<typename Mutex>
class padded_mutex<Mutex,false> : tbb::internal::mutex_copy_deprecated_and_disabled {
    typedef long pad_type;
    // Raw storage: the mutex size rounded up to whole cache lines, plus one
    // extra line so an aligned copy of Mutex always fits inside my_pad.
    pad_type my_pad[((sizeof(Mutex)+cache_line_size-1)/cache_line_size+1)*cache_line_size/sizeof(pad_type)];

    // Address of the wrapped mutex: this object's address rounded up to the
    // next cache line boundary inside my_pad.
    Mutex *impl() { return (Mutex *)((uintptr_t(this)|(cache_line_size-1))+1); }

public:
    static const bool is_rw_mutex = Mutex::is_rw_mutex;
    static const bool is_recursive_mutex = Mutex::is_recursive_mutex;
    static const bool is_fair_mutex = Mutex::is_fair_mutex;

    padded_mutex() { new(impl()) Mutex(); }
    ~padded_mutex() { impl()->~Mutex(); }

    //! Represents acquisition of the wrapped mutex.
    class scoped_lock : tbb::internal::no_copy {
        typename Mutex::scoped_lock my_scoped_lock;
    public:
        scoped_lock() : my_scoped_lock() {}
        scoped_lock( padded_mutex& m ) : my_scoped_lock(*m.impl()) {}
        ~scoped_lock() {}

        void acquire( padded_mutex& m ) { my_scoped_lock.acquire(*m.impl()); }
        bool try_acquire( padded_mutex& m ) { return my_scoped_lock.try_acquire(*m.impl()); }
        void release() { my_scoped_lock.release(); }
    };
};
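
// Usage sketch (illustrative only, not part of this header; assumes
// tbb::spin_mutex as the underlying mutex type):
//
//     typedef padded_mutex<tbb::spin_mutex, /*is_rw=*/false> padded_spin_mutex;
//     padded_spin_mutex my_lock;              // occupies its own cache line(s)
//     {
//         padded_spin_mutex::scoped_lock lock(my_lock);
//         // ... critical section ...
//     }                                       // released when lock is destroyed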

template<typename Mutex>
class padded_mutex<Mutex,true> : tbb::internal::mutex_copy_deprecated_and_disabled {
    typedef long pad_type;
    // Same padding scheme as the non-reader-writer specialization above.
    pad_type my_pad[((sizeof(Mutex)+cache_line_size-1)/cache_line_size+1)*cache_line_size/sizeof(pad_type)];

    // Address of the wrapped mutex: this object's address rounded up to the
    // next cache line boundary inside my_pad.
    Mutex *impl() { return (Mutex *)((uintptr_t(this)|(cache_line_size-1))+1); }

public:
    static const bool is_rw_mutex = Mutex::is_rw_mutex;
    static const bool is_recursive_mutex = Mutex::is_recursive_mutex;
    static const bool is_fair_mutex = Mutex::is_fair_mutex;

    padded_mutex() { new(impl()) Mutex(); }
    ~padded_mutex() { impl()->~Mutex(); }

    //! Represents acquisition of the wrapped reader-writer mutex.
    class scoped_lock : tbb::internal::no_copy {
        typename Mutex::scoped_lock my_scoped_lock;
    public:
        scoped_lock() : my_scoped_lock() {}
        scoped_lock( padded_mutex& m, bool write = true ) : my_scoped_lock(*m.impl(),write) {}
        ~scoped_lock() {}

        void acquire( padded_mutex& m, bool write = true ) { my_scoped_lock.acquire(*m.impl(),write); }
        bool try_acquire( padded_mutex& m, bool write = true ) { return my_scoped_lock.try_acquire(*m.impl(),write); }
        bool upgrade_to_writer() { return my_scoped_lock.upgrade_to_writer(); }
        bool downgrade_to_reader() { return my_scoped_lock.downgrade_to_reader(); }
        void release() { my_scoped_lock.release(); }
    };
};
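
// Usage sketch (illustrative only, not part of this header; assumes
// tbb::spin_rw_mutex as the underlying reader-writer mutex type):
//
//     typedef padded_mutex<tbb::spin_rw_mutex, /*is_rw=*/true> padded_rw_mutex;
//     padded_rw_mutex my_rw_lock;
//     {
//         padded_rw_mutex::scoped_lock lock(my_rw_lock, /*write=*/false); // reader
//         if( !lock.upgrade_to_writer() ) {
//             // The lock was released and reacquired as a writer; another
//             // thread may have modified the protected data, so revalidate.
//         }
//         // ... write to the protected data ...
//     }                                       // released when lock is destroyed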

} // namespace internal
} // namespace interface7
} // namespace tbb

#endif /* __TBB_mutex_padding_H */