Embedded Template Library 1.0
atomic_gcc_sync.h
/******************************************************************************
The MIT License (MIT)

Embedded Template Library.
https://github.com/ETLCPP/etl
https://www.etlcpp.com

Copyright (c) 2017 jwellbelove

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
******************************************************************************/

#ifndef ETL_ATOMIC_GCC_SYNC_INCLUDED
#define ETL_ATOMIC_GCC_SYNC_INCLUDED

#include "../platform.h"
#include "../type_traits.h"
#include "../static_assert.h"
#include "../nullptr.h"
#include "../char_traits.h"

#include <stdint.h>
#include <string.h>

#if defined(ETL_COMPILER_GCC)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wunused-value"
#endif

namespace etl
{
  //***************************************************************************
  // Atomic type for pre-C++11 GCC compilers that support the builtin '__sync'
  // functions. Only integral and pointer types are supported.
  //***************************************************************************

  typedef enum memory_order
  {
    memory_order_relaxed,
    memory_order_consume,
    memory_order_acquire,
    memory_order_release,
    memory_order_acq_rel,
    memory_order_seq_cst
  } memory_order;
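
  // Note: this memory_order enumeration mirrors the C++11 interface, but the
  // '__sync' builtins used below are documented by GCC as full barriers
  // (__sync_lock_test_and_set is an acquire barrier), so the order arguments
  // are accepted for interface compatibility rather than acted upon; hence
  // the -Wunused-parameter pragma above.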

  //***************************************************************************
  // For all types except bool and pointers.
  //***************************************************************************
  template <typename T>
  class atomic
  {
  public:

    ETL_STATIC_ASSERT(etl::is_integral<T>::value, "Only integral types are supported");

    atomic()
      : value(0)
    {
    }

    atomic(T v)
      : value(v)
    {
    }

    // Assignment
    T operator =(T v)
    {
      store(v);

      return v;
    }

    T operator =(T v) volatile
    {
      store(v);

      return v;
    }

    // Pre-increment
    T operator ++()
    {
      return __sync_add_and_fetch(&value, 1);
    }

    T operator ++() volatile
    {
      return __sync_add_and_fetch(&value, 1);
    }

    // Post-increment
    T operator ++(int)
    {
      return __sync_fetch_and_add(&value, 1);
    }

    T operator ++(int) volatile
    {
      return __sync_fetch_and_add(&value, 1);
    }

    // Pre-decrement
    T operator --()
    {
      return __sync_sub_and_fetch(&value, 1);
    }

    T operator --() volatile
    {
      return __sync_sub_and_fetch(&value, 1);
    }

    // Post-decrement
    T operator --(int)
    {
      return __sync_fetch_and_sub(&value, 1);
    }

    T operator --(int) volatile
    {
      return __sync_fetch_and_sub(&value, 1);
    }

    // Add (returns the new value, as std::atomic's operator += does)
    T operator +=(T v)
    {
      return __sync_add_and_fetch(&value, v);
    }

    T operator +=(T v) volatile
    {
      return __sync_add_and_fetch(&value, v);
    }

    // Subtract
    T operator -=(T v)
    {
      return __sync_sub_and_fetch(&value, v);
    }

    T operator -=(T v) volatile
    {
      return __sync_sub_and_fetch(&value, v);
    }

    // And
    T operator &=(T v)
    {
      return __sync_and_and_fetch(&value, v);
    }

    T operator &=(T v) volatile
    {
      return __sync_and_and_fetch(&value, v);
    }

    // Or
    T operator |=(T v)
    {
      return __sync_or_and_fetch(&value, v);
    }

    T operator |=(T v) volatile
    {
      return __sync_or_and_fetch(&value, v);
    }

    // Exclusive or
    T operator ^=(T v)
    {
      return __sync_xor_and_fetch(&value, v);
    }

    T operator ^=(T v) volatile
    {
      return __sync_xor_and_fetch(&value, v);
    }
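
    // Illustrative sketch (editor's addition, not in the original header):
    // the bitwise operators make an atomic integral usable as a flag word.
    //
    //   etl::atomic<unsigned int> flags(0U);
    //   flags |= 0x01U;   // atomically set bit 0
    //   flags &= ~0x01U;  // atomically clear bit 0
    //   flags ^= 0x80U;   // atomically toggle bit 7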

    // Conversion operator
    operator T () const
    {
      return __sync_fetch_and_add(&value, 0);
    }

    operator T() volatile const
    {
      return __sync_fetch_and_add(&value, 0);
    }

    // Is lock free?
    bool is_lock_free() const
    {
      return true;
    }

    bool is_lock_free() const volatile
    {
      return true;
    }

    // Store
    // Implemented as an atomic exchange (__sync_lock_test_and_set).
    void store(T v, etl::memory_order order = etl::memory_order_seq_cst)
    {
      (void)__sync_lock_test_and_set(&value, v);
    }

    void store(T v, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      (void)__sync_lock_test_and_set(&value, v);
    }

    // Load
    // A full-barrier read, implemented as a fetch_and_add of zero.
    T load(etl::memory_order order = etl::memory_order_seq_cst) const
    {
      return __sync_fetch_and_add(&value, 0);
    }

    T load(etl::memory_order order = etl::memory_order_seq_cst) const volatile
    {
      return __sync_fetch_and_add(&value, 0);
    }

    // Fetch add
    T fetch_add(T v, etl::memory_order order = etl::memory_order_seq_cst)
    {
      return __sync_fetch_and_add(&value, v);
    }

    T fetch_add(T v, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      return __sync_fetch_and_add(&value, v);
    }

    // Fetch subtract
    T fetch_sub(T v, etl::memory_order order = etl::memory_order_seq_cst)
    {
      return __sync_fetch_and_sub(&value, v);
    }

    T fetch_sub(T v, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      return __sync_fetch_and_sub(&value, v);
    }

    // Fetch or
    T fetch_or(T v, etl::memory_order order = etl::memory_order_seq_cst)
    {
      return __sync_fetch_and_or(&value, v);
    }

    T fetch_or(T v, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      return __sync_fetch_and_or(&value, v);
    }

    // Fetch and
    T fetch_and(T v, etl::memory_order order = etl::memory_order_seq_cst)
    {
      return __sync_fetch_and_and(&value, v);
    }

    T fetch_and(T v, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      return __sync_fetch_and_and(&value, v);
    }

    // Fetch exclusive or
    T fetch_xor(T v, etl::memory_order order = etl::memory_order_seq_cst)
    {
      return __sync_fetch_and_xor(&value, v);
    }

    T fetch_xor(T v, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      return __sync_fetch_and_xor(&value, v);
    }

    // Exchange
    T exchange(T v, etl::memory_order order = etl::memory_order_seq_cst)
    {
      return __sync_lock_test_and_set(&value, v);
    }

    T exchange(T v, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      return __sync_lock_test_and_set(&value, v);
    }
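
    // Illustrative sketch (editor's addition, not in the original header):
    // exchange() maps onto __sync_lock_test_and_set, the builtin GCC
    // documents for building simple test-and-set spinlocks.
    //
    //   etl::atomic<int> lock_flag(0);
    //   while (lock_flag.exchange(1) != 0) { /* spin until 0 observed */ }
    //   /* ...critical section... */
    //   lock_flag.store(0);  // unlock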

    // Compare exchange weak
    // __sync_val_compare_and_swap does not fail spuriously, so the weak form
    // behaves like the strong form here.
    bool compare_exchange_weak(T& expected, T desired, etl::memory_order order = etl::memory_order_seq_cst)
    {
      T old = __sync_val_compare_and_swap(&value, expected, desired);

      if (old == expected)
      {
        return true;
      }
      else
      {
        expected = old;
        return false;
      }
    }

    bool compare_exchange_weak(T& expected, T desired, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      T old = __sync_val_compare_and_swap(&value, expected, desired);

      if (old == expected)
      {
        return true;
      }
      else
      {
        expected = old;
        return false;
      }
    }

    bool compare_exchange_weak(T& expected, T desired, etl::memory_order success, etl::memory_order failure)
    {
      T old = __sync_val_compare_and_swap(&value, expected, desired);

      if (old == expected)
      {
        return true;
      }
      else
      {
        expected = old;
        return false;
      }
    }

    bool compare_exchange_weak(T& expected, T desired, etl::memory_order success, etl::memory_order failure) volatile
    {
      T old = __sync_val_compare_and_swap(&value, expected, desired);

      if (old == expected)
      {
        return true;
      }
      else
      {
        expected = old;
        return false;
      }
    }
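
    // Illustrative sketch (editor's addition, not in the original header):
    // the canonical compare-exchange retry loop, here keeping a running
    // maximum ('sample' is a hypothetical input value).
    //
    //   etl::atomic<int> peak(0);
    //   int observed = peak.load();
    //   while ((sample > observed) &&
    //          !peak.compare_exchange_weak(observed, sample))
    //   {
    //     // On failure 'observed' is reloaded with the current value; retry.
    //   }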

    // Compare exchange strong
    // Loops on the weak form; memcmp detects a genuine value mismatch (as
    // opposed to a spurious failure) and reports it to the caller.
    bool compare_exchange_strong(T& expected, T desired, etl::memory_order order = etl::memory_order_seq_cst)
    {
      T old = expected;

      while (!compare_exchange_weak(old, desired))
      {
        if (memcmp(&old, &expected, sizeof(T)))
        {
          expected = old;
          return false;
        }
      }

      return true;
    }

    bool compare_exchange_strong(T& expected, T desired, etl::memory_order order = etl::memory_order_seq_cst) volatile
    {
      T old = expected;

      while (!compare_exchange_weak(old, desired))
      {
        if (memcmp(&old, &expected, sizeof(T)))
        {
          expected = old;
          return false;
        }
      }

      return true;
    }

    bool compare_exchange_strong(T& expected, T desired, etl::memory_order success, etl::memory_order failure)
    {
      T old = expected;

      while (!compare_exchange_weak(old, desired))
      {
        if (memcmp(&old, &expected, sizeof(T)))
        {
          expected = old;
          return false;
        }
      }

      return true;
    }

    bool compare_exchange_strong(T& expected, T desired, etl::memory_order success, etl::memory_order failure) volatile
    {
      T old = expected;

      while (!compare_exchange_weak(old, desired))
      {
        if (memcmp(&old, &expected, sizeof(T)))
        {
          expected = old;
          return false;
        }
      }

      return true;
    }

  private:

    atomic& operator =(const atomic&) ETL_DELETE;
    atomic& operator =(const atomic&) volatile ETL_DELETE;

    mutable volatile T value;
  };
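
  // Usage sketch (editor's addition, not in the original header):
  //
  //   etl::atomic<int> counter(0);
  //   ++counter;                            // atomic increment
  //   int previous = counter.fetch_add(5);  // returns the value before the add
  //   int current  = counter.load();        // full-barrier read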

  //***************************************************************************
  // Specialisation for pointers.
  //***************************************************************************
  template <typename T>
  class atomic<T*>
  {
  public:

    atomic()
      : value(0U)
    {
    }

    atomic(T* v)
      : value(uintptr_t(v))
    {
    }

    // Assignment
    T* operator =(T* v)
    {
      store(v);

      return v;
    }

    T* operator =(T* v) volatile
    {
      store(v);

      return v;
    }

    // Pre-increment
    T* operator ++()
    {
      return (T*)__sync_add_and_fetch(&value, sizeof(T));
    }

    T* operator ++() volatile
    {
      return (T*)__sync_add_and_fetch(&value, sizeof(T));
    }

    // Post-increment
    T* operator ++(int)
    {
      return (T*)__sync_fetch_and_add(&value, sizeof(T));
    }

    T* operator ++(int) volatile
    {
      return (T*)__sync_fetch_and_add(&value, sizeof(T));
    }

    // Pre-decrement
    T* operator --()
    {
      return (T*)__sync_sub_and_fetch(&value, sizeof(T));
    }

    T* operator --() volatile
    {
      return (T*)__sync_sub_and_fetch(&value, sizeof(T));
    }

    // Post-decrement
    T* operator --(int)
    {
      return (T*)__sync_fetch_and_sub(&value, sizeof(T));
    }

    T* operator --(int) volatile
    {
      return (T*)__sync_fetch_and_sub(&value, sizeof(T));
    }
518 
519  // Add
520  T* operator +=(ptrdiff_t v)
521  {
522  return (T*)__sync_fetch_and_add(&value, v * sizeof(T));
523  }
524 
525  T* operator +=(ptrdiff_t v) volatile
526  {
527  return (T*)__sync_fetch_and_add(&value, v * sizeof(T));
528  }
529 
530  // Subtract
531  T* operator -=(ptrdiff_t v)
532  {
533  return (T*)__sync_fetch_and_sub(&value, v * sizeof(T));
534  }
535 
536  T* operator -=(ptrdiff_t v) volatile
537  {
538  return (T*)__sync_fetch_and_sub(&value, v * sizeof(T));
539  }
540 
541  // Conversion operator
542  operator T* () const
543  {
544  return (T*)__sync_fetch_and_add(&value, 0);
545  }
546 
547  operator T*() volatile const
548  {
549  return (T*)__sync_fetch_and_add(&value, 0);
550  }
551 
552  // Is lock free?
553  bool is_lock_free() const
554  {
555  return true;
556  }
557 
558  bool is_lock_free() const volatile
559  {
560  return true;
561  }
562 
563  // Store
564  void store(T* v, etl::memory_order order = etl::memory_order_seq_cst)
565  {
566  __sync_lock_test_and_set(&value, uintptr_t(v));
567  }
568 
569  void store(T* v, etl::memory_order order = etl::memory_order_seq_cst) volatile
570  {
571  __sync_lock_test_and_set(&value, uintptr_t(v));
572  }
573 
574  // Load
575  T* load(etl::memory_order order = etl::memory_order_seq_cst) const
576  {
577  return (T*)__sync_fetch_and_add(&value, 0);
578  }
579 
580  T* load(etl::memory_order order = etl::memory_order_seq_cst) const volatile
581  {
582  return (T*)__sync_fetch_and_add(&value, 0);
583  }
584 
585  // Fetch add
586  T* fetch_add(ptrdiff_t v, etl::memory_order order = etl::memory_order_seq_cst)
587  {
588  return (T*)__sync_fetch_and_add(&value, v);
589  }
590 
591  T* fetch_add(ptrdiff_t v, etl::memory_order order = etl::memory_order_seq_cst) volatile
592  {
593  return (T*)__sync_fetch_and_add(&value, v);
594  }
595 
596  // Fetch subtract
597  T* fetch_sub(ptrdiff_t v, etl::memory_order order = etl::memory_order_seq_cst)
598  {
599  return (T*)__sync_fetch_and_sub(&value, v);
600  }
601 
602  T* fetch_sub(ptrdiff_t v, etl::memory_order order = etl::memory_order_seq_cst) volatile
603  {
604  return (T*)__sync_fetch_and_sub(&value, v);
605  }
606 
607  // Exchange
608  T* exchange(T* v, etl::memory_order order = etl::memory_order_seq_cst)
609  {
610  return (T*)__sync_lock_test_and_set(&value, uintptr_t(v));
611  }
612 
613  T* exchange(T* v, etl::memory_order order = etl::memory_order_seq_cst) volatile
614  {
615  return (T*)__sync_lock_test_and_set(&value, uintptr_t(v));
616  }
617 
618  // Compare exchange weak
619  bool compare_exchange_weak(T*& expected, T* desired, etl::memory_order order = etl::memory_order_seq_cst)
620  {
621  T* old = (T*)__sync_val_compare_and_swap(&value, uintptr_t(expected), uintptr_t(desired));
622 
623  if (old == expected)
624  {
625  return true;
626  }
627  else
628  {
629  expected = old;
630  return false;
631  }
632  }
633 
634  bool compare_exchange_weak(T*& expected, T* desired, etl::memory_order order = etl::memory_order_seq_cst) volatile
635  {
636  T* old = (T*)__sync_val_compare_and_swap(&value, uintptr_t(expected), uintptr_t(desired));
637 
638  if (old == expected)
639  {
640  return true;
641  }
642  else
643  {
644  expected = old;
645  return false;
646  }
647  }
648 
649  bool compare_exchange_weak(T*& expected, T* desired, etl::memory_order success, etl::memory_order failure)
650  {
651  T* old = (T*)__sync_val_compare_and_swap(&value, uintptr_t(expected), uintptr_t(desired));
652 
653  if (old == expected)
654  {
655  return true;
656  }
657  else
658  {
659  expected = old;
660  return false;
661  }
662  }
663 
664  bool compare_exchange_weak(T*& expected, T* desired, etl::memory_order success, etl::memory_order failure) volatile
665  {
666  T* old = (T*)__sync_val_compare_and_swap(&value, uintptr_t(expected), uintptr_t(desired));
667 
668  if (old == expected)
669  {
670  return true;
671  }
672  else
673  {
674  expected = old;
675  return false;
676  }
677  }
678 
679  // Compare exchange strong
680  bool compare_exchange_strong(T*& expected, T* desired, etl::memory_order order = etl::memory_order_seq_cst)
681  {
682  T* old = expected;
683 
684  while (!compare_exchange_weak(old, desired))
685  {
686  if (memcmp(&old, &expected, sizeof(T*)))
687  {
688  expected = old;
689  return false;
690  }
691  }
692 
693  return true;
694  }
695 
696  bool compare_exchange_strong(T*& expected, T* desired, etl::memory_order order = etl::memory_order_seq_cst) volatile
697  {
698  T* old = expected;
699 
700  while (!compare_exchange_weak(old, desired))
701  {
702  if (memcmp(&old, &expected, sizeof(T*)))
703  {
704  expected = old;
705  return false;
706  }
707  }
708 
709  return true;
710  }
711 
712  bool compare_exchange_strong(T*& expected, T* desired, etl::memory_order success, etl::memory_order failure)
713  {
714  T* old = expected;
715 
716  while (!compare_exchange_weak(old, desired))
717  {
718  if (memcmp(&old, &expected, sizeof(T*)))
719  {
720  expected = old;
721  return false;
722  }
723  }
724 
725  return true;
726  }
727 
728  bool compare_exchange_strong(T*& expected, T* desired, etl::memory_order success, etl::memory_order failure) volatile
729  {
730  T* old = expected;
731 
732  while (!compare_exchange_weak(old, desired))
733  {
734  if (memcmp(&old, &expected, sizeof(T*)))
735  {
736  expected = old;
737  return false;
738  }
739  }
740 
741  return true;
742  }
743 
744  private:
745 
746  atomic& operator =(const atomic&) ETL_DELETE;
747  atomic& operator =(const atomic&) volatile ETL_DELETE;
748 
749  mutable uintptr_t value;
750  };
751 
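
  // Usage sketch (editor's addition, not in the original header): pointer
  // arithmetic is scaled by sizeof(T), as for std::atomic<T*>.
  //
  //   static int buffer[8];
  //   etl::atomic<int*> cursor(&buffer[0]);
  //   ++cursor;                // advance by one element
  //   int* p = cursor.load();  // p == &buffer[1]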

#if ETL_USING_8BIT_TYPES
  typedef etl::atomic<uint8_t>  atomic_uint8_t;
  typedef etl::atomic<int8_t>   atomic_int8_t;
#endif

#if ETL_USING_64BIT_TYPES
  typedef etl::atomic<uint64_t> atomic_uint64_t;
  typedef etl::atomic<int64_t>  atomic_int64_t;
#endif
}

#if defined(ETL_COMPILER_GCC)
#pragma GCC diagnostic pop
#endif

#endif