// Copyright (C) 2008-2015 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */
#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(__m | int(__mod)); }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(__m & int(__mod)); }
  // Drop release ordering as per [atomics.types.operations.req]/21.
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
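  // Illustrative compile-time checks (a sketch of the mapping above, using
  // only declarations from this header): the derived failure order drops any
  // release component and otherwise preserves the success order.
  static_assert(__cmpexch_failure_order(memory_order_acq_rel)
                == memory_order_acquire, "acq_rel falls back to acquire");
  static_assert(__cmpexch_failure_order(memory_order_release)
                == memory_order_relaxed, "release falls back to relaxed");
  static_assert(__cmpexch_failure_order(memory_order_seq_cst)
                == memory_order_seq_cst, "seq_cst is preserved");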
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
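  // A minimal usage sketch (illustrative only): atomic_flag is the classic
  // building block for a spin lock, pairing an acquire-ordered test_and_set
  // with a release-ordered clear:
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   while (lock.test_and_set(std::memory_order_acquire))
  //     { }                            // spin until the holder clears it
  //   // ... critical section ...
  //   lock.clear(std::memory_order_release);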
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
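  // A usage sketch (illustrative; `counter` is a hypothetical variable):
  // the integral base supplies the familiar std::atomic<int>-style
  // interface, so implicit seq_cst operations and explicit orders mix freely:
  //
  //   __atomic_base<int> counter(0);
  //   ++counter;                                     // seq_cst add_fetch
  //   counter.fetch_add(1, memory_order_relaxed);    // relaxed increment
  //   int snapshot = counter.load(memory_order_acquire);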
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
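  // A sketch of the pointer arithmetic above (illustrative only): fetch_add
  // scales its ptrdiff_t argument by sizeof(_PTp) via _M_type_size, so the
  // stored address advances by whole elements, mirroring built-in pointers:
  //
  //   long data[4] = { 0, 1, 2, 3 };
  //   __atomic_base<long*> p(data);
  //   p.fetch_add(2);                  // address advances by 2 * sizeof(long)
  //   long* q = p.load();              // q == &data[2]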
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H