// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2016 Dmitry Vyukov <dvyukov@google.com>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_
#define EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_

namespace Eigen {

// RunQueue is a fixed-size, partially non-blocking deque of Work items.
// Operations on the front of the queue must be done by a single thread (the
// owner); operations on the back of the queue can be done by multiple threads
// concurrently.
//
// Algorithm outline:
// All remote threads operating on the back of the queue are serialized by a
// mutex. This ensures that at most two threads access the state at a time:
// the owner and one remote thread (Size aside). The algorithm keeps the
// occupied region of the underlying array logically contiguous (it can wrap
// around, but there are no stray occupied elements). The owner operates on
// one end of this region, the remote thread on the other. Synchronization
// between these threads (potential consumption of the last element and
// take-up of the last empty slot) happens by means of a state variable in
// each element. The states are: empty, busy (insertion or removal in
// progress), and ready. Threads claim elements (empty->busy and ready->busy
// transitions) by means of a CAS operation. The finishing transitions
// (busy->empty and busy->ready) are done with a plain store, since at that
// point the element is exclusively owned by the current thread.
//
// Note: if only pointers were permitted as elements, the separate state
// variable would not be needed (a null/non-null pointer value would serve as
// the state), but that would require a malloc/free per operation for large,
// complex values (and this queue is designed to store std::function<()>).
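//
// A minimal usage sketch (illustrative only; the element type and queue size
// here are assumptions, not requirements of this header):
//
//   RunQueue<std::function<void()>, 1024> q;
//   // Owner thread only:
//   std::function<void()> rejected = q.PushFront([] { /* task */ });
//   std::function<void()> t = q.PopFront();
//   if (t) t();
//   // Any thread (serialized internally by a mutex):
//   q.PushBack([] { /* task */ });
//   std::function<void()> u = q.PopBack();
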
template <typename Work, unsigned kSize>
class RunQueue {
 public:
  RunQueue() : front_(0), back_(0) {
    // kSize must be a power of two for the index masking below to work.
    eigen_plain_assert((kSize & (kSize - 1)) == 0);
    eigen_plain_assert(kSize > 2);            // a queue this small makes no sense
    eigen_plain_assert(kSize <= (64 << 10));  // leave space for the counter bits
    for (unsigned i = 0; i < kSize; i++)
      array_[i].state.store(kEmpty, std::memory_order_relaxed);
  }

  ~RunQueue() { eigen_plain_assert(Size() == 0); }

  // PushFront inserts w at the beginning of the queue.
  // If the queue was full, returns w; otherwise returns a default-constructed
  // Work.
  Work PushFront(Work w) {
    unsigned front = front_.load(std::memory_order_relaxed);
    Elem* e = &array_[front & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kEmpty ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return w;
    front_.store(front + 1 + (kSize << 1), std::memory_order_relaxed);
    e->w = std::move(w);
    e->state.store(kReady, std::memory_order_release);
    return Work();
  }

  // PopFront removes and returns the first element of the queue.
  // If the queue was empty, returns a default-constructed Work.
  Work PopFront() {
    unsigned front = front_.load(std::memory_order_relaxed);
    Elem* e = &array_[(front - 1) & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kReady ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return Work();
    Work w = std::move(e->w);
    e->state.store(kEmpty, std::memory_order_release);
    front = ((front - 1) & kMask2) | (front & ~kMask2);
    front_.store(front, std::memory_order_relaxed);
    return w;
  }

  // PushBack adds w at the end of the queue.
  // If the queue was full, returns w; otherwise returns a default-constructed
  // Work.
  Work PushBack(Work w) {
    std::unique_lock<std::mutex> lock(mutex_);
    unsigned back = back_.load(std::memory_order_relaxed);
    Elem* e = &array_[(back - 1) & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kEmpty ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return w;
    back = ((back - 1) & kMask2) | (back & ~kMask2);
    back_.store(back, std::memory_order_relaxed);
    e->w = std::move(w);
    e->state.store(kReady, std::memory_order_release);
    return Work();
  }

  // PopBack removes and returns the last element of the queue.
  // If the queue was empty, returns a default-constructed Work.
  Work PopBack() {
    if (Empty()) return Work();
    std::unique_lock<std::mutex> lock(mutex_);
    unsigned back = back_.load(std::memory_order_relaxed);
    Elem* e = &array_[back & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kReady ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return Work();
    Work w = std::move(e->w);
    e->state.store(kEmpty, std::memory_order_release);
    back_.store(back + 1 + (kSize << 1), std::memory_order_relaxed);
    return w;
  }

  // PopBackHalf removes and returns half of the last elements of the queue.
  // Returns the number of elements removed.
  unsigned PopBackHalf(std::vector<Work>* result) {
    if (Empty()) return 0;
    std::unique_lock<std::mutex> lock(mutex_);
    unsigned back = back_.load(std::memory_order_relaxed);
    unsigned size = Size();
    unsigned mid = back;
    if (size > 1) mid = back + (size - 1) / 2;
    unsigned n = 0;
    unsigned start = 0;
    for (; static_cast<int>(mid - back) >= 0; mid--) {
      Elem* e = &array_[mid & kMask];
      uint8_t s = e->state.load(std::memory_order_relaxed);
      if (n == 0) {
        if (s != kReady || !e->state.compare_exchange_strong(
                               s, kBusy, std::memory_order_acquire))
          continue;
        start = mid;
      } else {
        // No need to store the intermediate kBusy state here: once the first
        // element is claimed, we exclusively own the remaining ones.
        eigen_plain_assert(s == kReady);
      }
      result->push_back(std::move(e->w));
      e->state.store(kEmpty, std::memory_order_release);
      n++;
    }
    if (n != 0)
      back_.store(start + 1 + (kSize << 1), std::memory_order_relaxed);
    return n;
  }

  // Size returns the current queue size.
  // Can be called by any thread at any time.
  unsigned Size() const { return SizeOrNotEmpty<true>(); }

  // Empty tests whether the queue is empty.
  // Can be called by any thread at any time.
  bool Empty() const { return SizeOrNotEmpty<false>() == 0; }

  // Flush deletes all elements from the queue.
  void Flush() {
    while (!Empty()) {
      PopFront();
    }
  }

 private:
  static const unsigned kMask = kSize - 1;
  static const unsigned kMask2 = (kSize << 1) - 1;
  struct Elem {
    std::atomic<uint8_t> state;
    Work w;
  };
  enum {
    kEmpty,
    kBusy,
    kReady,
  };
  std::mutex mutex_;
  // The low log2(kSize) + 1 bits of front_ and back_ contain a rolling index
  // of the front/back element, respectively. The remaining high bits contain
  // modification counters that are incremented on Push operations. This
  // allows us to (1) distinguish between empty and full conditions (with only
  // log2(kSize) index bits they would be indistinguishable), and (2) obtain
  // a consistent snapshot of front_/back_ for the Size operation by means of
  // the modification counters.
  std::atomic<unsigned> front_;
  std::atomic<unsigned> back_;
  Elem array_[kSize];

  // SizeOrNotEmpty returns the current queue size; if NeedSizeEstimate is
  // false, only whether the size is 0 is guaranteed to be correct.
  // Can be called by any thread at any time.
  template <bool NeedSizeEstimate>
  unsigned SizeOrNotEmpty() const {
    // Emptiness plays a critical role in thread pool blocking, so we go to
    // great lengths to avoid false positives (never claim a non-empty queue
    // to be empty).
    unsigned front = front_.load(std::memory_order_acquire);
    for (;;) {
      // Capture a consistent snapshot of front/back.
      unsigned back = back_.load(std::memory_order_acquire);
      unsigned front1 = front_.load(std::memory_order_relaxed);
      if (front != front1) {
        front = front1;
        std::atomic_thread_fence(std::memory_order_acquire);
        continue;
      }
      if (NeedSizeEstimate) {
        return CalculateSize(front, back);
      } else {
        // This value is 0 if the queue is empty, and unspecified otherwise.
        unsigned maybe_zero = ((front ^ back) & kMask2);
        // The size estimate must agree with the maybe_zero check on the
        // queue's empty/non-empty state.
        eigen_assert((CalculateSize(front, back) == 0) == (maybe_zero == 0));
        return maybe_zero;
      }
    }
  }

  EIGEN_ALWAYS_INLINE
  unsigned CalculateSize(unsigned front, unsigned back) const {
    int size = (front & kMask2) - (back & kMask2);
    // Fix wraparound of the rolling indices.
    if (size < 0) size += 2 * kSize;
    // The order of modification in push/pop is crafted to make the queue look
    // larger than it is during concurrent modifications, e.g. a push can
    // increment the size before the corresponding pop has decremented it.
    // So the computed size can be up to kSize + 1; clamp it.
    if (size > static_cast<int>(kSize)) size = kSize;
    return static_cast<unsigned>(size);
  }
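
  // Worked example for CalculateSize (values assumed for illustration): with
  // kSize = 8, kMask2 = 15. If front & kMask2 == 2 and back & kMask2 == 13,
  // then size = 2 - 13 = -11; adding 2 * kSize = 16 yields 5, the correct
  // element count across the wraparound of the rolling indices.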

  RunQueue(const RunQueue&) = delete;
  void operator=(const RunQueue&) = delete;
};

}  // namespace Eigen

#endif  // EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_