Anklang 0.3.0-460-gc4ef46ba
ASE — Anklang Sound Engine (C++)

atomics.hh
// This Source Code Form is licensed MPL-2.0: http://mozilla.org/MPL/2.0
#ifndef __ASE_ATOMICS_HH__
#define __ASE_ATOMICS_HH__

#include <ase/platform.hh>
#include <atomic>
#include <boost/atomic/atomic.hpp> // Needed for gcc to emit CMPXCHG16B
#include <ase/loft.hh>

namespace Ase {

// == Atomic ==
/// Substitute for std::atomic<> with fixes for GCC.
template<class T> using Atomic = boost::atomic<T>;

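/* Illustrative note (not part of the original header): the boost::atomic alias is
 * relevant for 16-byte structures such as MpmcStack::Head below, which need a
 * lock-free double-width CAS (CMPXCHG16B on x86-64). A hypothetical sketch:
 * @code
 * struct TaggedPtr { void *ptr = nullptr; uintptr_t tag = 0; }; // 16 bytes on 64-bit
 * Ase::Atomic<TaggedPtr> slot { TaggedPtr() };                  // expected lock-free on x86-64
 * TaggedPtr expected = slot.load(), desired { nullptr, expected.tag + 1 };
 * slot.compare_exchange_strong (expected, desired);             // double-width CAS
 * @endcode
 */
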
// == AtomicIntrusiveStack ==
/** Lock-free stack of intrusively linked nodes with atomic push and pop-all operations.
    The element type `T` must provide its intrusive next pointer through an unqualified
    hook `atomic_next_ptrref()`, as checked in the constructor. */
template<class T>
class AtomicIntrusiveStack {
  using AtomicTPtr = std::atomic<T*>;
  std::atomic<T*> head_ = nullptr;
public:
  AtomicIntrusiveStack()
  {
    // reconstructed condition: require the atomic_next_ptrref() hook used by push_chain() and pop_reversed()
    static_assert (std::is_same<AtomicTPtr&, decltype (atomic_next_ptrref ((T*) nullptr))>::value,
                   "Required: std::atomic<T*>& atomic_next_ptrref (T&)");
  }
  /// Check if popping returns null.
  bool
  empty () const
  {
    return head_.load() == nullptr;
  }
  /// Atomically push linked nodes first->…->last onto the stack, returns was_empty.
  bool
  push_chain (T *first, T *last)
  {
    AtomicTPtr &last_nextptr = atomic_next_ptrref (last);
    ASE_ASSERT_RETURN (last_nextptr == nullptr, false);
    T *exchange = head_.load();
    do // compare_exchange_strong() == false does: exchange = head_;
      last_nextptr = exchange;
    while (!head_.compare_exchange_strong (exchange, first));
    const bool was_empty = exchange == nullptr;
    return was_empty;
  }
  /// Atomically push el onto the stack, returns was_empty.
  bool
  push (T *el)
  {
    return push_chain (el, el);
  }
  /// Atomically pop all elements, returns the chain head in LIFO (most recently pushed first) order.
  T*
  pop_all ()
  {
    T *exchange = head_.load();
    do // compare_exchange_strong() == false does: exchange = head_;
      {}
    while (exchange && !head_.compare_exchange_strong (exchange, nullptr));
    return exchange;
  }
  /// Atomically pop all elements, reversing the chain so it is returned in push (FIFO) order.
  T*
  pop_reversed ()
  {
    T *current = pop_all(), *prev = nullptr;
    while (current)
      {
        AtomicTPtr &el_nextptr = atomic_next_ptrref (current);
        T *next = el_nextptr;
        el_nextptr = prev;
        prev = current;
        current = next;
      }
    return prev;
  }
};

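/* Usage sketch (illustrative, not part of the original header): the caller defines the
 * node type plus the atomic_next_ptrref() hook; the names below are hypothetical.
 * @code
 * struct Job {
 *   std::atomic<Job*> next = nullptr;
 *   int payload = 0;
 * };
 * inline std::atomic<Job*>& atomic_next_ptrref (Job *job) { return job->next; }
 *
 * Ase::AtomicIntrusiveStack<Job> queue;
 * Job *job = new Job();
 * job->payload = 42;
 * queue.push (job);                             // producer, any thread
 * for (Job *j = queue.pop_reversed(); j; ) {    // consumer drains in push order
 *   Job *next = j->next.load();
 *   delete j;
 *   j = next;
 * }
 * @endcode
 */
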
// == MpmcStack ==
/** Multi-producer/multi-consumer lock-free stack of intrusive nodes.
    Each `Node` must provide an atomic intrusive pointer member `intr_ptr_`.
    The head combines the top pointer with an ABA counter, so it requires a
    lock-free double-width CAS. Node memory must remain readable after pop()
    (no reclamation while other threads may still race on it). */
template<typename Node> /*alignas (64)*/
struct MpmcStack
{
  MpmcStack()
  {
    Head thead = head_;
    thead.next = (Node*) TAIL_; // valid stack: `head.next != nullptr`
    head_ = thead;
    static_assert (Atomic<Head>::is_always_lock_free);
    static_assert (decltype (std::declval<Node>().intr_ptr_)::is_always_lock_free);
  }
  ~MpmcStack()
  {
    assert (true == empty()); // may only destroy empty stacks
    Head nhead, ohead = head_.load();
    do // Note, `compare_exchange_weak == 0` does `ohead := head_`
      {
        nhead.aba_counter = ohead.aba_counter;
        nhead.next = nullptr;
      }
    while (!head_.compare_exchange_weak (ohead, nhead));
    ohead = head_.load();
    assert (ohead.next == nullptr); // valid stack: `head.next != nullptr`
  }
  bool
  empty() const
  {
    const Head ohead = head_.load();
    return ohead.next == (Node*) TAIL_;
  }
  bool
  push (Node *node)
  {
    assert (node && !node->intr_ptr_); // `node->intr_ptr_ != nullptr` indicates enlisted node
    Head nhead, ohead = head_.load();
    assert (ohead.next); // valid stack: `head.next != nullptr`
    do // Note, `compare_exchange_weak == 0` does `ohead := head_`
      {
        node->intr_ptr_ = ohead.next; // access OK, push() assumes node ownership
        nhead.aba_counter = ohead.aba_counter; // no inc, ABA needs pop() which increments
        nhead.next = node;
      }
    while (!head_.compare_exchange_weak (ohead, nhead));
    const bool was_empty = ohead.next == (Node*) TAIL_;
    return was_empty;
  }
  Node*
  pop()
  {
    Node *node;
    Head nhead, ohead = head_.load();
    assert (ohead.next); // valid stack: `head.next != nullptr`
    do // Note, `compare_exchange_weak == 0` does `ohead := head_`
      {
        node = ohead.next;
        if (node == (Node*) TAIL_)
          return nullptr; // empty stack
        nhead.next = node->intr_ptr_; // maybe access-after-pop, needs non-reclaimable memory
        nhead.aba_counter = ohead.aba_counter + 1; // tag pops to make ABA highly unlikely
      }
    while (!head_.compare_exchange_weak (ohead, nhead));
    node->intr_ptr_ = nullptr; // `node->intr_ptr_ == nullptr` indicates unlisted node
    return node;
  }
  // for debugging purposes, the pointer returned may be already invalid
  Node*
  peek()
  {
    const Head ohead = head_.load();
    return ohead.next;
  }
private:
  struct Head {
    Node *next = nullptr;
    uintptr_t aba_counter = 0;
  };
  Atomic<Head> head_;
  static constexpr uintptr_t TAIL_ = ~uintptr_t (0);
};

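/* Usage sketch (illustrative, not part of the original header): nodes embed the
 * required `intr_ptr_` member and live in memory that stays readable after pop()
 * (see the access-after-pop note above); the names below are hypothetical.
 * @code
 * struct Msg {
 *   Ase::Atomic<Msg*> intr_ptr_ = nullptr;   // required intrusive pointer
 *   int data = 0;
 * };
 * static Msg msg_pool[64];                    // non-reclaimable storage
 * static Ase::MpmcStack<Msg> freelist;
 *
 * // any thread:
 * freelist.push (&msg_pool[0]);
 * if (Msg *m = freelist.pop())
 *   m->data += 1;
 * @endcode
 */
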
// == AtomicStack ==
/** Lock-free stack of values, storing them in nodes from a grow-only allocator.
    Nodes are recycled through the allocator but never unmapped, which satisfies
    the read-after-free requirement of the underlying MpmcStack. */
template<typename Value, template<class> class GrowOnlyAllocator = Loft::Allocator>
class AtomicStack {
public:
  /// Return true if the stack holds no items.
  bool empty() const { return stack_.empty(); }
  /// Add value to top of the stack, returns if the stack was empty (true).
  bool
  push (Value &&value)
  {
    Node *node = node_realloc (nullptr);
    std::construct_at (node, std::move (value));
    return stack_.push (node);
  }
  /// Add a copy of value to top of the stack, returns if the stack was empty (true).
  bool
  push (const Value &value)
  {
    return push (std::move (Value (value)));
  }
  /// Pop value from top of the stack, returns if value was reassigned (true), otherwise the stack was empty.
  bool
  pop (Value &value)
  {
    Node *node = stack_.pop();
    if (!node)
      return false;
    value = std::move (node->value);
    std::destroy_at (node);
    node_realloc (node);
    return true;
  }
private:
  struct Node {
    Value value = {};
    Atomic<Node*> intr_ptr_ = nullptr; // atomic intrusive pointer
  };
  MpmcStack<Node> stack_;
  Node*
  node_realloc (Node *node)
  {
    static constexpr GrowOnlyAllocator<Node> grow_only_alloc;
    static_assert (GrowOnlyAllocator<Node>::allows_read_after_free == true);
    if (node) {
      grow_only_alloc.deallocate (node, 1);
      node = nullptr;
    } else
      node = grow_only_alloc.allocate (1);
    return node;
  }
};

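/* Usage sketch (illustrative, not part of the original header): plain value types
 * can be pushed and popped from any thread.
 * @code
 * Ase::AtomicStack<std::string> names;
 * names.push ("kick");                  // producer, any thread
 * names.push (std::string ("snare"));
 * std::string name;
 * while (names.pop (name))              // consumer, any thread
 *   process (name);                     // process() is a hypothetical consumer function
 * @endcode
 */
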
// == AtomicBits ==
using AtomicU64 = std::atomic<uint64>;

/// Vector of atomic bits, operates in blocks of 64 bits.
class AtomicBits : protected std::vector<AtomicU64> {
protected:
  std::vector<AtomicU64>& base() { return *this; }
  const std::vector<AtomicU64>& base() const { return *this; }
public:
  class Iter {
    AtomicBits *atomics_ = nullptr;
    size_t u_ = ~size_t (0), s_ = 0;
    AtomicU64& ubits() const { return const_cast<Iter*> (this)->atomics_->u64 (u_); }
    size_t usize() const { return atomics_ ? atomics_->usize() : 0; }
  public:
    explicit Iter (AtomicBits *a, size_t p) : atomics_ (a), u_ (p >> 6), s_ (p & 63) {}
    /*ctor*/ Iter () = default;
    /*copy*/ Iter (const Iter&) = default;
    Iter& operator= (const Iter&) = default;
    size_t position () const { return (u_ << 6) + s_; } // block index * 64 + bit offset
    bool is_set () const { return valid() && ubits().load() & (uint64 (1) << s_); }
    bool done () const { return !valid(); }
    bool valid () const { return atomics_ && u_ < usize(); }
    bool clear ();
    bool set (bool toggle);
    bool xor_ (bool toggle);
    Iter& big_inc1 ();
    Iter& operator= (bool toggle) { set (toggle); return *this; }
    Iter& operator^= (bool toggle) { xor_ (toggle); return *this; }
    Iter& operator|= (bool toggle) { if (toggle) set (1); return *this; }
    Iter& operator&= (bool toggle) { if (!toggle) set (0); return *this; }
    Iter& operator++ () { if (!done()) { s_ = (s_ + 1) & 63; u_ += s_ == 0; } return *this; }
    Iter& operator++ (int) { return this->operator++(); }
    bool operator== (const Iter &o) const { return (done() && o.done()) || position() == o.position(); }
    bool operator!= (const Iter &o) const { return !operator== (o); }
    bool operator== (bool b) const { return b == is_set(); }
    bool operator!= (bool b) const { return b != is_set(); }
    explicit operator bool () const { return is_set(); }
  };
  /*dtor*/ ~AtomicBits () {}
  explicit AtomicBits (size_t nbits) : std::vector<AtomicU64> ((nbits + 63) / 64) {}
  explicit AtomicBits (const AtomicBits&) = delete;
  size_t usize () const { return base().size(); }
  size_t size () const { return 64 * usize(); }
  uint64 u64 (size_t upos) const { return base()[upos]; }
  AtomicU64& u64 (size_t upos) { return base()[upos]; }
  AtomicBits& operator= (const AtomicBits&) = delete;
  Iter iter (size_t pos) { return Iter (this, pos); }
  Iter begin () { return iter (0); }
  Iter end () const { return {}; }
  bool all (bool toggle) const;
  bool any (bool toggle) const;
  bool set (size_t pos, bool toggle) { return iter (pos).set (toggle); }
  bool operator[] (size_t pos) const { return const_cast<AtomicBits*> (this)->iter (pos).is_set(); }
  Iter operator[] (size_t pos) { return iter (pos); }
};

inline bool
AtomicBits::all (bool toggle) const
{
  const uint64 match = toggle * ~uint64 (0);
  for (size_t u = 0; u < usize(); u++)
    if (u64 (u) != match)
      return false;
  return true;
}

inline bool
AtomicBits::any (bool toggle) const
{
  if (toggle) {
    for (size_t u = 0; u < usize(); u++)
      if (u64 (u))
        return true;
  } else {
    for (size_t u = 0; u < usize(); u++)
      if (u64 (u) != ~uint64 (0))
        return true;
  }
  return false;
}

inline bool
AtomicBits::Iter::set (bool toggle)
{
  ASE_ASSERT_RETURN (!done(), false);
  const uint64 mask = uint64 (1) << s_;
  bool last; // previous state of the addressed bit
  if (toggle)
    last = mask & ubits().fetch_or (mask);
  else
    last = mask & ubits().fetch_and (~mask);
  return last;
}

inline bool
AtomicBits::Iter::clear()
{
  if (ASE_ISLIKELY (valid()) &&
      ASE_ISLIKELY (!(ubits().load() & (uint64 (1) << s_))))
    return false; // fast path
  return set (0);
}

inline bool
AtomicBits::Iter::xor_ (bool toggle)
{
  ASE_ASSERT_RETURN (!done(), false);
  const uint64 mask = uint64 (1) << s_;
  const bool last = mask & ubits().fetch_xor (toggle ? mask : 0); // previous state of the addressed bit
  return last;
}

/// Increment iterator by 1, allow big increments skipping zero bits.
inline AtomicBits::Iter&
AtomicBits::Iter::big_inc1 ()
{
  ASE_RETURN_UNLESS (valid(), *this);
  // if any bit is set in current block, increment by one
  if (ubits().load())
    return ++*this;
  // else, jump to next block
  s_ = 0;
  do
    u_++;
  while (u_ < usize() && !ubits().load());
  return *this;
}

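/* Usage sketch (illustrative, not part of the original header):
 * @code
 * Ase::AtomicBits flags (256);          // capacity rounds up to whole 64-bit blocks
 * flags.set (7, true);                  // atomically set bit 7
 * flags[70] |= true;                    // Iter proxy, same effect as set (70, true)
 * size_t nset = 0;
 * for (auto it = flags.begin(); !it.done(); it.big_inc1())
 *   nset += it.is_set();                // big_inc1() skips over all-zero blocks
 * @endcode
 */
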
} // Ase

#endif /* __ASE_ATOMICS_HH__ */