// Copyright (C) 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
//
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */
#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */
  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }
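  // Illustrative only (not part of this header): the failure order keeps
  // any acquire component of the success order but never a release
  // component, e.g.
  //
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  //   __cmpexch_failure_order(memory_order_seq_cst) == memory_order_seq_cst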
  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }
  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
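  // Illustrative only (not part of this header): a release fence paired
  // with an acquire fence publishes data through a relaxed flag.  Assumes
  // user code includes <atomic> and <cassert>.
  //
  //   int data;
  //   std::atomic<bool> ready(false);
  //
  //   void producer()
  //   {
  //     data = 42;
  //     std::atomic_thread_fence(std::memory_order_release);
  //     ready.store(true, std::memory_order_relaxed);
  //   }
  //
  //   void consumer()
  //   {
  //     while (!ready.load(std::memory_order_relaxed)) { }
  //     std::atomic_thread_fence(std::memory_order_acquire);
  //     assert(data == 42);  // guaranteed to see the producer's write
  //   }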
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;
  /// atomic_char
  typedef __atomic_base<char>                  atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>           atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>         atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>                 atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>        atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                   atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>          atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>                  atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>         atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>             atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>    atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>               atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>              atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>              atomic_char32_t;
  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>          atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>         atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>         atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>        atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>         atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>        atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>         atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>        atomic_uint_least64_t;

  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>           atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>          atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>          atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>         atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>          atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>         atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>          atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>         atomic_uint_fast64_t;
  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>              atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>             atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>                atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>              atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>             atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>             atomic_ptrdiff_t;
#define ATOMIC_VAR_INIT(_VI) { _VI }
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    bool _M_i;
#else
    unsigned char _M_i;
#endif
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base({ __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0 })
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };
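  // Illustrative only (not part of this header): atomic_flag as a minimal
  // test-and-set spinlock.  Assumes user code includes <atomic>.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   { while (__lock.test_and_set(std::memory_order_acquire)) { } }
  //
  //   void unlock()
  //   { __lock.clear(std::memory_order_release); }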
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
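      // Illustrative only (not part of this header): a release store paired
      // with an acquire load makes all writes before the store visible to
      // the loading thread.  Assumes user code includes <atomic> and
      // <cassert>.
      //
      //   int __payload;
      //   std::atomic<int> __flag(0);
      //
      //   // writer thread:
      //   __payload = 1;
      //   __flag.store(1, std::memory_order_release);
      //
      //   // reader thread:
      //   if (__flag.load(std::memory_order_acquire) == 1)
      //     assert(__payload == 1);  // guaranteed by release/acquire pairing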
      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
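      // Illustrative only (not part of this header): the canonical
      // compare-exchange loop.  compare_exchange_weak may fail spuriously,
      // so it is retried; on failure the freshly observed value is written
      // back into __expected.  Assumes user code includes <atomic>.
      //
      //   std::atomic<int> __v(0);
      //
      //   void atomic_double()
      //   {
      //     int __expected = __v.load(std::memory_order_relaxed);
      //     while (!__v.compare_exchange_weak(__expected, __expected * 2,
      //                                       std::memory_order_acq_rel,
      //                                       std::memory_order_relaxed))
      //       { }  // loop until the CAS succeeds
      //   }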
      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
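  // Illustrative only (not part of this header): fetch_add returns the
  // value held *before* the addition, so a relaxed counter can hand out
  // unique ids.  Assumes user code includes <atomic>.
  //
  //   std::atomic<unsigned> __next_id(0);
  //
  //   unsigned get_id()
  //   { return __next_id.fetch_add(1, std::memory_order_relaxed); }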
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
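  // Illustrative only (not part of this header): pointer fetch_add scales
  // the offset by sizeof(T), exactly like built-in pointer arithmetic.  A
  // sketch of a lock-free bump allocator over a fixed buffer; all names
  // here are hypothetical.  Assumes user code includes <atomic>.
  //
  //   int __buf[64];
  //   std::atomic<int*> __cursor(__buf);
  //
  //   int* take_one()
  //   { return __cursor.fetch_add(1); }  // advances by sizeof(int) bytes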
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif