inline lock_free_spmc_ring_buffer_base::lock_free_spmc_ring_buffer_base(size_t capacity):
	head_(tagged_index_t(0, 0)),
	tail_(tagged_index_t(0, 0)),
	ring_size_(static_cast<index_type>(enforce_valid_size(capacity)))
{
#if defined(__cpp_lib_atomic_is_always_lock_free)
	static_assert(std::atomic<tagged_index_t>::is_always_lock_free);
#else
	LASS_ENFORCE(head_.is_lock_free());
	LASS_ENFORCE(tail_.is_lock_free());
#endif
}
inline bool lock_free_spmc_ring_buffer_base::empty() const
{
	return tagged_index_t::empty(head_.load(std::memory_order_acquire), tail_.load(std::memory_order_acquire));
}
inline bool lock_free_spmc_ring_buffer_base::full() const
{
	return tagged_index_t::full(head_.load(std::memory_order_acquire), tail_.load(std::memory_order_acquire));
}
inline lock_free_spmc_ring_buffer_base::tagged_index_t
lock_free_spmc_ring_buffer_base::next_index(tagged_index_t index) const
{
	// Advance the index; on wrap-around, restart at zero and bump the tag
	// so that a recycled slot can be told apart from the old one (ABA).
	const index_type i = ++index.index;
	return i < ring_size_
		? tagged_index_t(i, index.tag)
		: tagged_index_t(0, index.tag + 1);
}
inline size_t lock_free_spmc_ring_buffer_base::enforce_valid_size(size_t size)
{
	if (size == 0)
	{
		throw std::length_error("ring buffer capacity cannot be zero");
	}
	if (size > num::NumTraits<index_type>::max)
	{
		throw std::length_error("exceeded maximum ring buffer capacity");
	}
	return size;
}
template <typename T>
lock_free_spmc_value_ring_buffer<T>::lock_free_spmc_value_ring_buffer(size_t capacity):
	lock_free_spmc_ring_buffer_base(capacity),
	ring_(capacity)
{
}
template <typename T>
bool lock_free_spmc_value_ring_buffer<T>::try_push(const value_type& x)
{
	const tagged_index_t head = head_.load(std::memory_order_relaxed);
	const tagged_index_t tail = tail_.load(std::memory_order_acquire);
	if (tagged_index_t::full(head, tail))
	{
		LASS_ASSERT(head.tag == tail.tag + 1);
		return false;
	}
	ring_[head.index].store(x, std::memory_order_release);
	head_.store(next_index(head), std::memory_order_release);
	return true;
}
template <typename T>
bool lock_free_spmc_value_ring_buffer<T>::try_push(value_type&& x)
{
	const tagged_index_t head = head_.load(std::memory_order_relaxed);
	const tagged_index_t tail = tail_.load(std::memory_order_acquire);
	if (tagged_index_t::full(head, tail))
	{
		LASS_ASSERT(head.tag == tail.tag + 1);
		return false;
	}
	ring_[head.index].store(std::move(x), std::memory_order_release);
	head_.store(next_index(head), std::memory_order_release);
	return true;
}
template <typename T>
template <class... Args>
bool lock_free_spmc_value_ring_buffer<T>::try_emplace(Args&&... args)
{
	const tagged_index_t head = head_.load(std::memory_order_relaxed);
	const tagged_index_t tail = tail_.load(std::memory_order_acquire);
	if (tagged_index_t::full(head, tail))
	{
		LASS_ASSERT(head.tag == tail.tag + 1);
		return false;
	}
	ring_[head.index].store(value_type(std::forward<Args>(args)...), std::memory_order_release);
	head_.store(next_index(head), std::memory_order_release);
	return true;
}
template <typename T>
bool lock_free_spmc_value_ring_buffer<T>::try_pop(value_type& x)
{
	tagged_index_t tail = tail_.load(std::memory_order_acquire);
	while (true)
	{
		const tagged_index_t head = head_.load(std::memory_order_acquire);
		if (tagged_index_t::empty(head, tail))
		{
			return false;
		}
		// Read the value first; the CAS on tail_ decides which consumer wins the slot.
		x = ring_[tail.index].load(std::memory_order_acquire);
		if (tail_.compare_exchange_weak(tail, next_index(tail)))
		{
			return true;
		}
	}
}
template <typename T, typename A>
lock_free_spmc_object_ring_buffer<T, A>::lock_free_spmc_object_ring_buffer(size_t capacity):
	lock_free_spmc_ring_buffer_base(capacity),
	value_allocator_(sizeof(T)),
	ring_(capacity)
{
}
template <typename T, typename A>
lock_free_spmc_object_ring_buffer<T, A>::~lock_free_spmc_object_ring_buffer()
{
	// Destroy and free any objects still left in the ring.
	const tagged_index_t head = head_.load(std::memory_order_acquire);
	const tagged_index_t tail = tail_.load(std::memory_order_acquire);
	for (tagged_index_t i = tail; !tagged_index_t::empty(i, head); i = next_index(i))
	{
		value_type* p = ring_[i.index].load(std::memory_order_relaxed);
		p->~value_type();
		value_allocator_.deallocate(p);
	}
}
template <typename T, typename A>
bool lock_free_spmc_object_ring_buffer<T, A>::try_push(const value_type& x)
{
	const tagged_index_t head = head_.load(std::memory_order_relaxed);
	const tagged_index_t tail = tail_.load(std::memory_order_acquire);
	if (tagged_index_t::full(head, tail))
	{
		return false;
	}
	void* p = value_allocator_.allocate();
	try
	{
		ring_[head.index] = new (p) value_type(x);
	}
	catch (...)
	{
		value_allocator_.deallocate(p);
		throw;
	}
	head_.store(next_index(head), std::memory_order_release);
	return true;
}
template <typename T, typename A>
bool lock_free_spmc_object_ring_buffer<T, A>::try_push(value_type&& x)
{
	const tagged_index_t head = head_.load(std::memory_order_relaxed);
	const tagged_index_t tail = tail_.load(std::memory_order_acquire);
	if (tagged_index_t::full(head, tail))
	{
		return false;
	}
	void* p = value_allocator_.allocate();
	try
	{
		ring_[head.index] = new (p) value_type(std::move(x));
	}
	catch (...)
	{
		value_allocator_.deallocate(p);
		throw;
	}
	head_.store(next_index(head), std::memory_order_release);
	return true;
}
template <typename T, typename A>
template <class... Args>
bool lock_free_spmc_object_ring_buffer<T, A>::try_emplace(Args&&... args)
{
	const tagged_index_t head = head_.load(std::memory_order_relaxed);
	const tagged_index_t tail = tail_.load(std::memory_order_acquire);
	if (tagged_index_t::full(head, tail))
	{
		return false;
	}
	void* p = value_allocator_.allocate();
	try
	{
		ring_[head.index] = new (p) value_type(std::forward<Args>(args)...);
	}
	catch (...)
	{
		value_allocator_.deallocate(p);
		throw;
	}
	head_.store(next_index(head), std::memory_order_release);
	return true;
}
template <typename T, typename A>
bool lock_free_spmc_object_ring_buffer<T, A>::try_pop(value_type& x)
{
	tagged_index_t tail = tail_.load(std::memory_order_acquire);
	while (true)
	{
		const tagged_index_t head = head_.load(std::memory_order_acquire);
		if (tagged_index_t::empty(head, tail))
		{
			return false;
		}
		value_type* p = ring_[tail.index];
		if (tail_.compare_exchange_weak(tail, next_index(tail)))
		{
			// This consumer won the slot: take the value, then destroy and free it.
			x = std::move(*p);
			p->~value_type();
			value_allocator_.deallocate(p);
			return true;
		}
	}
}
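/* Usage sketch (illustrative only, not part of this file): a single producer
 * thread pushes while consumer threads pop.  The class name and the try_push /
 * try_pop calls follow the definitions above; the yield loop and the consume()
 * callback are assumptions made only for this example.
 *
 *	lock_free_spmc_value_ring_buffer<int> buffer(1024);
 *
 *	// producer thread: try_push returns false while the ring is full.
 *	while (!buffer.try_push(42))
 *	{
 *		std::this_thread::yield();
 *	}
 *
 *	// each consumer thread: try_pop returns false when the ring is empty.
 *	int value;
 *	if (buffer.try_pop(value))
 *	{
 *		consume(value);  // hypothetical callback
 *	}
 */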