#ifndef ETL_SPSC_QUEUE_ATOMIC_INCLUDED
#define ETL_SPSC_QUEUE_ATOMIC_INCLUDED
template <const size_t MEMORY_MODEL = etl::memory_model::MEMORY_MODEL_LARGE>
class queue_spsc_atomic_base
{
public:
  /// Is the queue empty?
  bool empty() const
  {
    return read.load(etl::memory_order_acquire) == write.load(etl::memory_order_acquire);
  }

  /// Is the queue full?
  bool full() const
  {
    size_type next_index = get_next_index(write.load(etl::memory_order_acquire), RESERVED);

    return (next_index == read.load(etl::memory_order_acquire));
  }
  /// How many items are in the queue?
  size_type size() const
  {
    size_type write_index = write.load(etl::memory_order_acquire);
    size_type read_index  = read.load(etl::memory_order_acquire);

    size_type n;
    if (write_index >= read_index)
    {
      n = write_index - read_index;
    }
    else
    {
      // The write index has wrapped around.
      n = RESERVED - read_index + write_index;
    }

    return n;
  }
  size_type available() const
  {
    return RESERVED - size() - 1;
  }
  /// How many items can the queue hold?
  size_type capacity() const
  {
    return RESERVED - 1;
  }

  /// How many items can the queue hold?
  size_type max_size() const
  {
    return RESERVED - 1;
  }
protected:

  queue_spsc_atomic_base(size_type reserved_)
    : write(0),
      read(0),
      RESERVED(reserved_)
  {
  }
  /// Calculate the next index, wrapping back to zero at the end of the buffer.
  static size_type get_next_index(size_type index, size_type maximum)
  {
    ++index;

    if (index == maximum)
    {
      index = 0;
    }

    return index;
  }
  etl::atomic<size_type> write; ///< Where to input new data.
  etl::atomic<size_type> read;  ///< Where to get the oldest data.
  const size_type RESERVED;     ///< The number of slots in the buffer (capacity + 1).
#if defined(ETL_POLYMORPHIC_SPSC_QUEUE_ATOMIC) || defined(ETL_POLYMORPHIC_CONTAINERS)
public:
  virtual ~queue_spsc_atomic_base()
  {
  }
#else
protected:
  ~queue_spsc_atomic_base()
  {
  }
#endif
};
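// Illustrative note (not part of the original header): the base class keeps one
// slot unused so that "empty" and "full" can be told apart from the two indices
// alone. For example, with a user-visible size of 4:
//
//   RESERVED    == 5 slots in the buffer
//   capacity()  == RESERVED - 1 == 4
//   empty       when read == write
//   full        when get_next_index(write, RESERVED) == read
//   available() == RESERVED - size() - 1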
template <typename T, const size_t MEMORY_MODEL = etl::memory_model::MEMORY_MODEL_LARGE>
class iqueue_spsc_atomic : public queue_spsc_atomic_base<MEMORY_MODEL>
{
private:

  typedef typename etl::queue_spsc_atomic_base<MEMORY_MODEL> base_t;

public:
  typedef T        value_type;       ///< The type stored in the queue.
  typedef T&       reference;        ///< A reference to the type used in the queue.
  typedef const T& const_reference;  ///< A const reference to the type used in the queue.
#if ETL_CPP11_SUPPORTED
  typedef T&&      rvalue_reference; ///< An rvalue reference to the type used in the queue.
#endif
  typedef typename base_t::size_type size_type; ///< The type used for determining the size of the queue.

  using base_t::RESERVED;
  using base_t::get_next_index;
  /// Push a value to the queue.
  bool push(const_reference value)
  {
    size_type write_index = write.load(etl::memory_order_relaxed);
    size_type next_index  = get_next_index(write_index, RESERVED);

    if (next_index != read.load(etl::memory_order_acquire))
    {
      ::new (&p_buffer[write_index]) T(value);

      write.store(next_index, etl::memory_order_release);

      return true;
    }

    // The queue is full.
    return false;
  }
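  // Explanatory note (not from the original source): only the producer thread
  // modifies 'write', so it may be re-read here with memory_order_relaxed. The
  // acquire load of 'read' stops the producer from reusing a slot before the
  // consumer has finished with it, and the release store to 'write' publishes
  // the placement-new construction to the consumer's acquire load in pop().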
#if ETL_CPP11_SUPPORTED
  /// Push a moved value to the queue.
  bool push(rvalue_reference value)
  {
    size_type write_index = write.load(etl::memory_order_relaxed);
    size_type next_index  = get_next_index(write_index, RESERVED);

    if (next_index != read.load(etl::memory_order_acquire))
    {
      ::new (&p_buffer[write_index]) T(etl::move(value));

      write.store(next_index, etl::memory_order_release);

      return true;
    }

    // The queue is full.
    return false;
  }
#endif
#if ETL_CPP11_SUPPORTED && ETL_NOT_USING_STLPORT && !defined(ETL_QUEUE_ATOMIC_FORCE_CPP03)
  /// Construct a value in the queue 'in place'.
  template <typename ... Args>
  bool emplace(Args&&... args)
  {
    size_type write_index = write.load(etl::memory_order_relaxed);
    size_type next_index  = get_next_index(write_index, RESERVED);

    if (next_index != read.load(etl::memory_order_acquire))
    {
      ::new (&p_buffer[write_index]) T(etl::forward<Args>(args)...);

      write.store(next_index, etl::memory_order_release);

      return true;
    }

    // The queue is full.
    return false;
  }
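  // When variadic templates are unavailable (or ETL_QUEUE_ATOMIC_FORCE_CPP03 is
  // defined), the overloads below provide emplace for one to four constructor
  // arguments instead.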
#else
  /// Construct a value in the queue 'in place' from one argument.
  template <typename T1>
  bool emplace(const T1& value1)
  {
    size_type write_index = write.load(etl::memory_order_relaxed);
    size_type next_index  = get_next_index(write_index, RESERVED);

    if (next_index != read.load(etl::memory_order_acquire))
    {
      ::new (&p_buffer[write_index]) T(value1);

      write.store(next_index, etl::memory_order_release);

      return true;
    }

    // The queue is full.
    return false;
  }
  /// Construct a value in the queue 'in place' from two arguments.
  template <typename T1, typename T2>
  bool emplace(const T1& value1, const T2& value2)
  {
    size_type write_index = write.load(etl::memory_order_relaxed);
    size_type next_index  = get_next_index(write_index, RESERVED);

    if (next_index != read.load(etl::memory_order_acquire))
    {
      ::new (&p_buffer[write_index]) T(value1, value2);

      write.store(next_index, etl::memory_order_release);

      return true;
    }

    // The queue is full.
    return false;
  }
  /// Construct a value in the queue 'in place' from three arguments.
  template <typename T1, typename T2, typename T3>
  bool emplace(const T1& value1, const T2& value2, const T3& value3)
  {
    size_type write_index = write.load(etl::memory_order_relaxed);
    size_type next_index  = get_next_index(write_index, RESERVED);

    if (next_index != read.load(etl::memory_order_acquire))
    {
      ::new (&p_buffer[write_index]) T(value1, value2, value3);

      write.store(next_index, etl::memory_order_release);

      return true;
    }

    // The queue is full.
    return false;
  }
  /// Construct a value in the queue 'in place' from four arguments.
  template <typename T1, typename T2, typename T3, typename T4>
  bool emplace(const T1& value1, const T2& value2, const T3& value3, const T4& value4)
  {
    size_type write_index = write.load(etl::memory_order_relaxed);
    size_type next_index  = get_next_index(write_index, RESERVED);

    if (next_index != read.load(etl::memory_order_acquire))
    {
      ::new (&p_buffer[write_index]) T(value1, value2, value3, value4);

      write.store(next_index, etl::memory_order_release);

      return true;
    }

    // The queue is full.
    return false;
  }
#endif
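  // Every emplace overload constructs the element directly in its buffer slot
  // with placement new, so no temporary T is created and copied as it would be
  // by push(const_reference).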
  /// Pop a value from the queue.
  bool pop(reference value)
  {
    size_type read_index = read.load(etl::memory_order_relaxed);

    if (read_index == write.load(etl::memory_order_acquire))
    {
      // The queue is empty.
      return false;
    }

    size_type next_index = get_next_index(read_index, RESERVED);

    value = p_buffer[read_index];
    p_buffer[read_index].~T();

    read.store(next_index, etl::memory_order_release);

    return true;
  }
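  // Explanatory note (not from the original source): the acquire load of 'write'
  // synchronises with the producer's release store, so the element is fully
  // constructed before it is copied out and destroyed here. The release store
  // to 'read' then hands the slot back to the producer.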
#if ETL_CPP11_SUPPORTED
  /// Pop a value from the queue, moving it into the destination.
  bool pop(rvalue_reference value)
  {
    size_type read_index = read.load(etl::memory_order_relaxed);

    if (read_index == write.load(etl::memory_order_acquire))
    {
      // The queue is empty.
      return false;
    }

    size_type next_index = get_next_index(read_index, RESERVED);

    value = etl::move(p_buffer[read_index]);
    p_buffer[read_index].~T();

    read.store(next_index, etl::memory_order_release);

    return true;
  }
#endif
  /// Pop a value from the queue and discard it.
  bool pop()
  {
    size_type read_index = read.load(etl::memory_order_relaxed);

    if (read_index == write.load(etl::memory_order_acquire))
    {
      // The queue is empty.
      return false;
    }

    size_type next_index = get_next_index(read_index, RESERVED);

    p_buffer[read_index].~T();

    read.store(next_index, etl::memory_order_release);

    return true;
  }
protected:

  iqueue_spsc_atomic(T* p_buffer_, size_type reserved_)
    : base_t(reserved_),
      p_buffer(p_buffer_)
  {
  }

private:

  // This queue cannot be copied or moved.
  iqueue_spsc_atomic(const iqueue_spsc_atomic&) ETL_DELETE;
  iqueue_spsc_atomic& operator =(const iqueue_spsc_atomic&) ETL_DELETE;

#if ETL_CPP11_SUPPORTED
  iqueue_spsc_atomic(iqueue_spsc_atomic&&) = delete;
  iqueue_spsc_atomic& operator =(iqueue_spsc_atomic&&) = delete;
#endif

  T* p_buffer; ///< The buffer that holds the queued items.
};
template <typename T, size_t SIZE, const size_t MEMORY_MODEL = etl::memory_model::MEMORY_MODEL_LARGE>
class queue_spsc_atomic : public iqueue_spsc_atomic<T, MEMORY_MODEL>
{
private:

  typedef typename etl::iqueue_spsc_atomic<T, MEMORY_MODEL> base_t;

public:

  typedef typename base_t::size_type size_type;

  /// The number of slots reserved internally: one more than the usable size.
  static const size_type RESERVED_SIZE = size_type(SIZE + 1);

  /// The maximum number of items the queue can hold.
  static const size_type MAX_SIZE = size_type(SIZE);

  /// Default constructor.
  queue_spsc_atomic()
    : base_t(reinterpret_cast<T*>(&buffer[0]), RESERVED_SIZE)
  {
  }

private:

  /// Uninitialised storage for the queued items.
  typename etl::aligned_storage<sizeof(T), etl::alignment_of<T>::value>::type buffer[RESERVED_SIZE];
};
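// Illustrative usage sketch (assumes one producer thread and one consumer
// thread; the element type and size are example values only):
//
//   etl::queue_spsc_atomic<int, 8> queue;  // Up to 8 ints; 9 slots reserved internally.
//
//   // Producer thread:
//   bool accepted = queue.push(42);        // Returns false if the queue is full.
//
//   // Consumer thread:
//   int value;
//   if (queue.pop(value))
//   {
//     // 'value' now holds the oldest item that was pushed.
//   }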