// -*- C++ -*- header.

// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
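
  // Usage sketch (illustrative only, not part of this header): these
  // memory_order values are passed to the atomic operations below to
  // constrain how surrounding loads and stores may be reordered, e.g.
  //
  //   std::atomic<int> __ready{0};
  //   __ready.store(1, std::memory_order_release);           // publish
  //   while (!__ready.load(std::memory_order_acquire)) { }   // observe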

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
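
  // Illustrative sketch: these modifier bits are a GCC extension for
  // requesting hardware lock elision (Intel TSX HLE) by OR-ing a
  // modifier into the order, e.g. to elide a spinlock acquisition
  // (__lock is a hypothetical std::atomic<int> lock word; this only
  // has an effect on TSX-capable hardware):
  //
  //   __lock.exchange(1, memory_order_acquire | __memory_order_hle_acquire);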

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
			| (__m & __memory_order_modifier_mask));
  }
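
  // Worked example: a failed compare-exchange performs no store, so the
  // derived failure order drops any release half while preserving the
  // modifier bits:
  //
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed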

  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
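
  // Usage sketch (illustrative): a fence pairs a relaxed atomic access
  // with explicit ordering (__flag is a hypothetical std::atomic<bool>):
  //
  //   if (__flag.load(std::memory_order_relaxed))
  //     {
  //       std::atomic_thread_fence(std::memory_order_acquire);
  //       // reads after the fence see writes made before the matching
  //       // release store/fence in the other thread
  //     }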

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
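
  // Illustrative note: kill_dependency terminates a memory_order_consume
  // dependency chain (__ptr is a hypothetical std::atomic<int*>):
  //
  //   int* __p = __ptr.load(std::memory_order_consume);
  //   int __i = std::kill_dependency(*__p);  // __i no longer carries a
  //                                          // dependency on __p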


  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char> atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char> atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char> atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short> atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short> atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int> atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int> atomic_uint;

  /// atomic_long
  typedef __atomic_base<long> atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long> atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long> atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t> atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t> atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t> atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t> atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t> atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t> atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t> atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t> atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t> atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t> atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t> atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t> atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;


#define ATOMIC_VAR_INIT(_VI) { _VI }
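
  // Usage sketch (illustrative): ATOMIC_VAR_INIT provides C-compatible
  // aggregate initialization of an atomic object, e.g.
  //
  //   std::atomic_int __counter = ATOMIC_VAR_INIT(0);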

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
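
  // Usage sketch (illustrative): atomic_flag is the minimal guaranteed
  // lock-free type and suffices for a simple spinlock:
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __critical_section()   // hypothetical helper
  //   {
  //     while (__lock.test_and_set(std::memory_order_acquire))
  //       ;                       // spin until the flag was clear
  //     // ... exclusive work ...
  //     __lock.clear(std::memory_order_release);
  //   }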


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
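
  // Usage sketch (illustrative): through this base, the integral typedefs
  // above support both operator and named forms; a typical
  // compare-exchange retry loop looks like:
  //
  //   std::atomic_int __v{0};
  //   __v += 5;                                     // seq_cst RMW
  //   __v.fetch_add(1, std::memory_order_relaxed);
  //
  //   int __expected = __v.load();
  //   while (!__v.compare_exchange_weak(__expected, __expected * 2))
  //     ;  // on failure __expected is refreshed with the current value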


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
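
  // Usage sketch (illustrative): arithmetic on an atomic pointer is
  // scaled by sizeof(_PTp) via _M_type_size, matching built-in pointer
  // arithmetic. Using the std::atomic<int*> interface from <atomic>,
  // which is implemented in terms of this specialization:
  //
  //   int __a[4] = {0, 1, 2, 3};
  //   std::atomic<int*> __p{__a};
  //   __p.fetch_add(2);          // advances by 2 * sizeof(int) bytes
  //   // __p.load() == __a + 2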

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif