#ifndef EIGEN_CXX11_THREADPOOL_EVENTCOUNT_H_
#define EIGEN_CXX11_THREADPOOL_EVENTCOUNT_H_

namespace Eigen {

class EventCount {
 public:
  class Waiter;

  EventCount(MaxSizeVector<Waiter>& waiters)
      : state_(kStackMask), waiters_(waiters) {
    eigen_plain_assert(waiters.size() < (1 << kWaiterBits) - 1);
  }

  ~EventCount() {
    // Ensure there are no waiters.
    eigen_plain_assert(state_.load() == kStackMask);
  }
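
  // Sketch of the intended wait protocol (the names `ec`, `w`, `predicate` and
  // `act` below are illustrative, not part of this header). EventCount behaves
  // like a condition variable whose predicate does not need a mutex:
  //
  //   Waiting thread:
  //     if (predicate) return act();
  //     EventCount::Waiter& w = waiters[my_index];
  //     ec.Prewait();
  //     if (predicate) {
  //       ec.CancelWait();
  //       return act();
  //     }
  //     ec.CommitWait(&w);
  //
  //   Notifying thread (after changing the predicate):
  //     predicate = true;
  //     ec.Notify(true);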
  // Prewait prepares for waiting. After calling Prewait, the thread must
  // re-check the wait predicate and then call either CancelWait or CommitWait.
  void Prewait() {
    uint64_t state = state_.load(std::memory_order_relaxed);
    for (;;) {
      CheckState(state);
      uint64_t newstate = state + kWaiterInc;
      CheckState(newstate);
      if (state_.compare_exchange_weak(state, newstate,
                                       std::memory_order_seq_cst))
        return;
    }
  }
  // CommitWait commits waiting after Prewait.
  void CommitWait(Waiter* w) {
    eigen_plain_assert((w->epoch & ~kEpochMask) == 0);
    w->state = Waiter::kNotSignaled;
    const uint64_t me = (w - &waiters_[0]) | w->epoch;
    uint64_t state = state_.load(std::memory_order_seq_cst);
    for (;;) {
      CheckState(state, true);
      uint64_t newstate;
      if ((state & kSignalMask) != 0) {
        // Consume the signal and return immediately.
        newstate = state - kWaiterInc - kSignalInc;
      } else {
        // Remove this thread from the pre-wait counter and add it to the
        // waiter stack.
        newstate = ((state & kWaiterMask) - kWaiterInc) | me;
        w->next.store(state & (kStackMask | kEpochMask),
                      std::memory_order_relaxed);
      }
      CheckState(newstate);
      if (state_.compare_exchange_weak(state, newstate,
                                       std::memory_order_acq_rel)) {
        if ((state & kSignalMask) == 0) {
          w->epoch += kEpochInc;
          Park(w);
        }
        return;
      }
    }
  }
  // CancelWait cancels the effects of the previous Prewait call.
  void CancelWait() {
    uint64_t state = state_.load(std::memory_order_relaxed);
    for (;;) {
      CheckState(state, true);
      uint64_t newstate = state - kWaiterInc;
      // We don't know whether this thread was also notified, so we must not
      // consume a signal unconditionally. Only if the number of waiters equals
      // the number of signals do we know the thread was notified and must take
      // away the signal.
      if (((state & kWaiterMask) >> kWaiterShift) ==
          ((state & kSignalMask) >> kSignalShift))
        newstate -= kSignalInc;
      CheckState(newstate);
      if (state_.compare_exchange_weak(state, newstate,
                                       std::memory_order_acq_rel))
        return;
    }
  }
  // Notify wakes one (notifyAll == false) or all (notifyAll == true) waiting
  // threads. Must be called after changing the associated wait predicate.
  void Notify(bool notifyAll) {
    std::atomic_thread_fence(std::memory_order_seq_cst);
    uint64_t state = state_.load(std::memory_order_acquire);
    for (;;) {
      CheckState(state);
      const uint64_t waiters = (state & kWaiterMask) >> kWaiterShift;
      const uint64_t signals = (state & kSignalMask) >> kSignalShift;
      // Easy case: no waiters.
      if ((state & kStackMask) == kStackMask && waiters == signals) return;
      uint64_t newstate;
      if (notifyAll) {
        // Empty the wait stack and set the signal count to the number of
        // pre-wait threads.
        newstate =
            (state & kWaiterMask) | (waiters << kSignalShift) | kStackMask;
      } else if (signals < waiters) {
        // There is a thread in pre-wait state, unblock it.
        newstate = state + kSignalInc;
      } else {
        // Pop a waiter from the stack and unpark it.
        Waiter* w = &waiters_[state & kStackMask];
        uint64_t next = w->next.load(std::memory_order_relaxed);
        newstate = (state & (kWaiterMask | kSignalMask)) | next;
      }
      CheckState(newstate);
      if (state_.compare_exchange_weak(state, newstate,
                                       std::memory_order_acq_rel)) {
        if (!notifyAll && (signals < waiters))
          return;  // unblocked pre-wait thread
        if ((state & kStackMask) == kStackMask) return;
        Waiter* w = &waiters_[state & kStackMask];
        if (!notifyAll) w->next.store(kStackMask, std::memory_order_relaxed);
        Unpark(w);
        return;
      }
    }
  }
  class Waiter {
    friend class EventCount;
    // Align to 128 byte boundary to prevent false sharing with other Waiter
    // objects in the same vector.
    EIGEN_ALIGN_TO_BOUNDARY(128) std::atomic<uint64_t> next;
    std::mutex mu;
    std::condition_variable cv;
    uint64_t epoch = 0;
    unsigned state = kNotSignaled;
    enum { kNotSignaled, kWaiting, kSignaled };
  };
 private:
  static const uint64_t kWaiterBits = 14;
  static const uint64_t kStackMask = (1ull << kWaiterBits) - 1;
  static const uint64_t kWaiterShift = kWaiterBits;
  static const uint64_t kWaiterMask = ((1ull << kWaiterBits) - 1)
                                      << kWaiterShift;
  static const uint64_t kWaiterInc = 1ull << kWaiterShift;
  static const uint64_t kSignalShift = 2 * kWaiterBits;
  static const uint64_t kSignalMask = ((1ull << kWaiterBits) - 1)
                                      << kSignalShift;
  static const uint64_t kSignalInc = 1ull << kSignalShift;
  static const uint64_t kEpochShift = 3 * kWaiterBits;
  static const uint64_t kEpochBits = 64 - kEpochShift;
  static const uint64_t kEpochMask = ((1ull << kEpochBits) - 1) << kEpochShift;
  static const uint64_t kEpochInc = 1ull << kEpochShift;

  std::atomic<uint64_t> state_;
  MaxSizeVector<Waiter>& waiters_;
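
  // state_ layout, as implied by the masks and shifts above:
  // - the low kWaiterBits form a stack of waiters that committed wait
  //   (indexes into waiters_ are the stack elements, kStackMask means empty);
  // - the next kWaiterBits count waiters in pre-wait state;
  // - the next kWaiterBits count pending signals;
  // - the remaining bits are an ABA epoch for the stack
  //   (stored in the Waiter node and incremented on push).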
  static void CheckState(uint64_t state, bool waiter = false) {
    static_assert(kEpochBits >= 20, "not enough bits to prevent ABA problem");
    const uint64_t waiters = (state & kWaiterMask) >> kWaiterShift;
    const uint64_t signals = (state & kSignalMask) >> kSignalShift;
    eigen_plain_assert(waiters >= signals);
    eigen_plain_assert(waiters < (1 << kWaiterBits) - 1);
    eigen_plain_assert(!waiter || waiters > 0);
  }
  void Park(Waiter* w) {
    std::unique_lock<std::mutex> lock(w->mu);
    while (w->state != Waiter::kSignaled) {
      w->state = Waiter::kWaiting;
      w->cv.wait(lock);
    }
  }
  void Unpark(Waiter* w) {
    for (Waiter* next; w; w = next) {
      uint64_t wnext = w->next.load(std::memory_order_relaxed) & kStackMask;
      next = wnext == kStackMask ? nullptr : &waiters_[wnext];
      unsigned state;
      {
        std::unique_lock<std::mutex> lock(w->mu);
        state = w->state;
        w->state = Waiter::kSignaled;
      }
      // Avoid notifying if the thread was not actually waiting.
      if (state == Waiter::kWaiting) w->cv.notify_one();
    }
  }
  EventCount(const EventCount&) = delete;
  void operator=(const EventCount&) = delete;
};
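
// Construction sketch (illustrative, not part of this header): the Waiter
// slots are owned by the caller, typically one per thread of a pool, and must
// outlive the EventCount. `num_threads` is a hypothetical name:
//
//   MaxSizeVector<EventCount::Waiter> waiters(num_threads);
//   EventCount ec(waiters);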
}  // namespace Eigen

#endif  // EIGEN_CXX11_THREADPOOL_EVENTCOUNT_H_