Anklang-0.3.0.dev956+gd75ac925 anklang-0.3.0.dev956+gd75ac925
ASE — Anklang Sound Engine (C++)

« « « Anklang Documentation
Loading...
Searching...
No Matches
atomics.hh
Go to the documentation of this file.
1 // This Source Code Form is licensed MPL-2.0: http://mozilla.org/MPL/2.0
2#pragma once
3
4#include <ase/platform.hh>
5#include <atomic>
6#include <boost/atomic/atomic.hpp> // Needed for gcc to emit CMPXCHG16B
7#include <ase/loft.hh>
8
9namespace Ase {
10
11// == Atomic ==
13template<class T> using Atomic = boost::atomic<T>;
14
15// == AtomicIntrusiveStack ==
/// Lock-free stack with atomic push() and pop_all() operations.
/// Requires a user-provided accessor `std::atomic<T*>& atomic_next_ptrref (...)`
/// (found via ADL) that yields the intrusive next-pointer of a node.
template<class T>
class AtomicIntrusiveStack {
  // NOTE(review): the class-head and alias lines were lost in extraction and
  // were reconstructed from the doxygen index — confirm against upstream.
  using AtomicTPtr = std::atomic<T*>;
  std::atomic<T*> head_ = nullptr;
public:
  AtomicIntrusiveStack()
  {
    // NOTE(review): the static_assert condition was lost in extraction; this
    // reconstruction enforces the accessor contract named in the message.
    static_assert (std::is_same<std::atomic<T*>&, decltype (atomic_next_ptrref (std::declval<T*> ()))>::value,
                   "Required: std::atomic<T*>& atomic_next_ptrref (T&)");
  }
  /// Check if popping returns null.
  bool
  empty () const
  {
    return head_.load() == nullptr;
  }
  /// Atomically push linked nodes first->…->last onto the stack, returns was_empty.
  bool
  push_chain (T *first, T *last)
  {
    AtomicTPtr &last_nextptr = atomic_next_ptrref (last);
    ASE_ASSERT_RETURN (last_nextptr == nullptr, false); // `last` must not already be enlisted
    T *exchange = head_.load();
    do // compare_exchange_strong() == false does: exchange = head_;
      last_nextptr = exchange; // re-link chain tail onto the current head
    while (!head_.compare_exchange_strong (exchange, first));
    const bool was_empty = exchange == nullptr;
    return was_empty;
  }
  /// Atomically push `el` onto the stack, returns was_empty.
  bool
  push (T *el)
  {
    return push_chain (el, el);
  }
  /// Atomically pop all elements from the stack in LIFO order.
  T*
  pop_all ()
  {
    T *exchange = head_.load();
    do // compare_exchange_strong() == false does: exchange = head_;
      {}
    while (exchange && !head_.compare_exchange_strong (exchange, nullptr));
    return exchange;
  }
  /// Atomically pop all elements from the stack in FIFO order.
  T*
  pop_reversed ()
  {
    T *current = pop_all(), *prev = nullptr; // fixed stray double semicolon
    while (current) // reverse the LIFO chain in place to obtain FIFO order
      {
        AtomicTPtr &el_nextptr = atomic_next_ptrref (current);
        T *next = el_nextptr;
        el_nextptr = prev;
        prev = current;
        current = next;
      }
    return prev;
  }
};
82
83// == MpmcStack ==
94template<typename Node> /*alignas (64)*/
96{
97 MpmcStack()
98 {
99 Head thead = head_;
100 thead.next = (Node*) TAIL_; // valid stack: `head.next != nullptr`
101 head_ = thead;
102 static_assert (Atomic<Head>::is_always_lock_free);
103 static_assert (decltype (std::declval<Node>().intr_ptr_)::is_always_lock_free);
104 }
105 ~MpmcStack()
106 {
107 assert (true == empty()); // may only destroy empty stacks
108 Head nhead, ohead = head_.load();
109 do // Note, `compare_exchange_weak == 0` does `ohead := head_`
110 {
111 nhead.aba_counter = ohead.aba_counter;
112 nhead.next = nullptr;
113 }
114 while (!head_.compare_exchange_weak (ohead, nhead));
115 ohead = head_.load();
116 assert (ohead.next == nullptr); // valid stack: `head.next != nullptr`
117 }
118 bool
119 empty() const
120 {
121 const Head ohead = head_.load();
122 return ohead.next == (Node*) TAIL_;
123 }
124 bool
125 push (Node *node)
126 {
127 assert (node && !node->intr_ptr_); // `node->intr_ptr_ != nullptr` indicates enlisted node
128 Head nhead, ohead = head_.load();
129 assert (ohead.next); // valid stack: `head.next != nullptr`
130 do // Note, `compare_exchange_weak == 0` does `ohead := head_`
131 {
132 node->intr_ptr_ = ohead.next; // access OK, push() assumes node ownership
133 nhead.aba_counter = ohead.aba_counter; // no inc, ABA needs pop() which increments
134 nhead.next = node;
135 }
136 while (!head_.compare_exchange_weak (ohead, nhead));
137 const bool was_empty = ohead.next == (Node*) TAIL_;
138 return was_empty;
139 }
140 Node*
141 pop()
142 {
143 Node *node;
144 Head nhead, ohead = head_.load();
145 assert (ohead.next); // valid stack: `head.next != nullptr`
146 do // Note, `compare_exchange_weak == 0` does `ohead := head_`
147 {
148 node = ohead.next;
149 if (node == (Node*) TAIL_)
150 return nullptr; // empty stack
151 nhead.next = node->intr_ptr_; // maybe access-after-pop, needs non-reclaimable memory
152 nhead.aba_counter = ohead.aba_counter + 1; // tag pops to make ABA highly unlikely
153 }
154 while (!head_.compare_exchange_weak (ohead, nhead));
155 node->intr_ptr_ = nullptr; // `node->intr_ptr_ == nullptr` indicates unlisted node
156 return node;
157 }
158 // for debugging puposes, the pointer returned may be already invalid
159 Node*
160 peek()
161 {
162 const Head ohead = head_.load();
163 return ohead.next;
164 }
165private:
166 struct Head {
167 Node *next = nullptr;
168 uintptr_t aba_counter = 0;
169 };
170 Atomic<Head> head_;
171 static constexpr uintptr_t TAIL_ = ~uintptr_t (0);
172};
173
174// == AtomicStack ==
177template<typename Value, template<class> class GrowOnlyAllocator = Loft::Allocator>
180 bool empty() const { return stack_.empty(); }
182 bool
183 push (Value &&value)
184 {
185 Node *node = node_realloc (nullptr);
186 std::construct_at (node, std::move (value));
187 return stack_.push (node);
188 }
190 bool
191 push (const Value &value)
192 {
193 return push (std::move (Value (value)));
194 }
196 bool
197 pop (Value &value)
198 {
199 Node *node = stack_.pop();
200 if (!node)
201 return false;
202 value = std::move (node->value);
203 std::destroy_at (node);
204 node_realloc (node);
205 return true;
206 }
207private:
208 struct Node {
209 Value value = {};
210 Atomic<Node*> intr_ptr_ = nullptr; // atomic intrusive pointer
211 };
212 MpmcStack<Node> stack_;
213 Node*
214 node_realloc (Node *node)
215 {
216 static constexpr GrowOnlyAllocator<Node> grow_only_alloc;
217 static_assert (GrowOnlyAllocator<Node>::allows_read_after_free == true);
218 if (node) {
219 grow_only_alloc.deallocate (node, 1);
220 node = nullptr;
221 } else
222 node = grow_only_alloc.allocate (1);
223 return node;
224 }
225};
226
227// == AtomicBits ==
228using AtomicU64 = std::atomic<uint64>;
229
231class AtomicBits : protected std::vector<AtomicU64> {
232protected:
233 std::vector<AtomicU64>& base() { return *this; }
234 const std::vector<AtomicU64>& base() const { return *this; }
235public:
236 class Iter {
237 AtomicBits *atomics_ = nullptr;
238 size_t u_ = ~size_t (0), s_ = 0;
239 AtomicU64& ubits() const { return const_cast<Iter*> (this)->atomics_->u64 (u_); }
240 size_t usize() const { return atomics_ ? atomics_->usize() : 0; }
241 public:
242 explicit Iter (AtomicBits *a, size_t p) : atomics_ (a), u_ (p >> 6), s_ (p & 63) {}
243 /*ctor*/ Iter () = default;
244 /*copy*/ Iter (const Iter&) = default;
245 Iter& operator= (const Iter&) = default;
246 size_t position () const { return u_ + s_; }
247 bool is_set () const { return valid() && ubits().load() & (uint64 (1) << s_); }
248 bool done () const { return !valid(); }
249 bool valid () const { return atomics_ && u_ < usize(); }
250 bool clear ();
251 bool set (bool toggle);
252 bool xor_ (bool toggle);
253 Iter& big_inc1 ();
254 Iter& operator= (bool toggle) { set (toggle); return *this; }
255 Iter& operator^= (bool toggle) { xor_ (toggle); return *this; }
256 Iter& operator|= (bool toggle) { if (toggle) set (1); return *this; }
257 Iter& operator&= (bool toggle) { if (!toggle) set (0); return *this; }
258 Iter& operator++ () { if (!done()) { s_ = (s_ + 1) & 63; u_ += s_ == 0; } return *this;}
259 Iter& operator++ (int) { return this->operator++(); }
260 bool operator== (const Iter &o) const { return (done() && o.done()) || position() == o.position(); }
261 bool operator!= (const Iter &o) const { return !operator== (o); }
262 bool operator== (bool b) const { return b == is_set(); }
263 bool operator!= (bool b) const { return b != is_set(); }
264 explicit operator bool () const { return is_set(); }
265 };
266 /*dtor*/ ~AtomicBits () {}
267 explicit AtomicBits (size_t nbits) : std::vector<AtomicU64> ((nbits + 63) / 64) {}
268 explicit AtomicBits (const AtomicBits&) = delete;
269 size_t usize () const { return base().size(); }
270 size_t size () const { return 64 * usize(); }
271 uint64 u64 (size_t upos) const { return base()[upos]; }
272 AtomicU64& u64 (size_t upos) { return base()[upos]; }
273 AtomicBits& operator= (const AtomicBits&) = delete;
274 Iter iter (size_t pos) { return Iter (this, pos); }
275 Iter begin () { return iter (0); }
276 Iter end () const { return {}; }
277 bool all (bool toggle) const;
278 bool any (bool toggle) const;
279 bool set (size_t pos, bool toggle) { return iter (pos).set (toggle); }
280 bool operator[] (size_t pos) const { return const_cast<AtomicBits*> (this)->iter (pos).is_set(); }
281 Iter operator[] (size_t pos) { return iter (pos); }
282};
283
284inline bool
285AtomicBits::all (bool toggle) const
286{
287 const uint64 match = toggle * ~uint64 (0);
288 for (size_t u = 0; u < usize(); u++)
289 if (u64 (u) != match)
290 return false;
291 return true;
292}
293
294inline bool
295AtomicBits::any (bool toggle) const
296{
297 if (toggle) {
298 for (size_t u = 0; u < usize(); u++)
299 if (u64 (u))
300 return true;
301 } else {
302 for (size_t u = 0; u < usize(); u++)
303 if (u64 (u) != ~uint64 (0))
304 return true;
305 }
306 return false;
307}
308
309inline bool
310AtomicBits::Iter::set (bool toggle)
311{
312 ASE_ASSERT_RETURN (!done(), false);
313 bool last;
314 if (toggle)
315 last = ubits().fetch_or (uint64 (1) << s_);
316 else
317 last = ubits().fetch_and (~(uint64 (1) << s_));
318 return last;
319}
320
321inline bool
322AtomicBits::Iter::clear()
323{
324 if (ASE_ISLIKELY (valid()) &&
325 ASE_ISLIKELY (!(ubits().load() & (uint64 (1) << s_))))
326 return false; // fast path
327 return set (0);
328}
329
330inline bool
331AtomicBits::Iter::xor_ (bool toggle)
332{
333 ASE_ASSERT_RETURN (!done(), false);
334 bool last;
335 last = ubits().fetch_xor (toggle ? uint64 (1) << s_ : 0);
336 return last;
337}
338
340inline AtomicBits::Iter&
342{
343 ASE_RETURN_UNLESS (valid(), *this);
344 // if any bit is set in current block, increment by one
345 if (ubits().load())
346 return ++*this;
347 // else, jump to next block
348 s_ = 0;
349 do
350 u_++;
351 while (!ubits().load() && u_ < usize());
352 return *this;
353}
354
355} // Ase
356
assert
Iter & big_inc1()
Increment iterator by 1, allow big increments skipping zero bits.
Definition atomics.hh:341
Vector of atomic bits, operates in blocks of 64 bits.
Definition atomics.hh:231
Lock-free stack with atomic push() and pop_all operations.
Definition atomics.hh:20
T * pop_reversed()
Atomically pop all elements from the stack in FIFO order.
Definition atomics.hh:68
T * pop_all()
Atomically pop all elements from the stack in LIFO order.
Definition atomics.hh:57
bool push_chain(T *first, T *last)
Atomically push linked nodes first->…->last onto the stack, returns was_empty.
Definition atomics.hh:37
bool push(T *el)
Atomically push el onto the stack, returns was_empty.
Definition atomics.hh:50
bool empty() const
Check if popping returns null.
Definition atomics.hh:31
Allocator for STL containers.
Definition loft.hh:91
T compare_exchange_strong(T... args)
T construct_at(T... args)
#define ASE_ASSERT_RETURN(expr,...)
Return from the current function if expr evaluates to false and issue an assertion warning.
Definition cxxaux.hh:81
#define ASE_RETURN_UNLESS(cond,...)
Return silently if cond does not evaluate to true, with return value ...
Definition cxxaux.hh:78
#define ASE_ISLIKELY(expr)
Compiler hint to optimize for expr evaluating to true.
Definition cxxaux.hh:44
T destroy_at(T... args)
T fetch_and(T... args)
T fetch_or(T... args)
T load(T... args)
The Anklang C++ API namespace.
Definition api.hh:8
boost::atomic< T > Atomic
Substitute for std::atomic<> with fixes for GCC.
Definition atomics.hh:13
uint64_t uint64
A 64-bit unsigned integer, use PRI*64 in format strings.
Definition cxxaux.hh:24
T size(T... args)
typedef uintptr_t
Thread-safe, lock-free stack based on MpmcStack and an Allocator with allows_read_after_free.
Definition atomics.hh:178
bool push(const Value &value)
Add a copy of value to top of the stack, returns if the stack was empty (true).
Definition atomics.hh:191
bool empty() const
Return true if the stack holds no items.
Definition atomics.hh:180
bool pop(Value &value)
Pop value from top of the stack, returns if value was reassigned (true), otherwise the stack was empty (false).
Definition atomics.hh:197
bool push(Value &&value)
Add value to top of the stack, returns if the stack was empty (true).
Definition atomics.hh:183
Multi-producer, multi-consumer stack for non-reclaimable memory nodes.
Definition atomics.hh:96
Value type used to interface with various property types.
Definition value.hh:53