libcopp 1.2.1
atomic_int_type.h — source listing from the generated documentation.
Note: this listing is extracted from the documentation pages; some original source lines (hyperlinked identifiers, using-declarations, class headers) are omitted by the extractor.
1 
18 #ifndef UTIL_LOCK_ATOMIC_INT_TYPE_H
19 #define UTIL_LOCK_ATOMIC_INT_TYPE_H
20 
21 #pragma once
22 
23 #include "std/explicit_declare.h"
24 
25 #if __cplusplus >= 201103L
26 #include <cstdint>
27 #elif defined(_MSC_VER) && defined(_MSVC_LANG) && _MSVC_LANG >= 201402L
28 #include <cstdint>
29 #else
30 
31 // patch for old gcc
32 #ifndef __STDC_LIMIT_MACROS
33 #define _UNDEF__STDC_LIMIT_MACROS
34 #define __STDC_LIMIT_MACROS
35 #endif
36 #ifndef __STDC_CONSTANT_MACROS
37 #define _UNDEF__STDC_CONSTANT_MACROS
38 #define __STDC_CONSTANT_MACROS
39 #endif
40 #include <limits.h>
41 #include <stdint.h>
42 #ifdef _UNDEF__STDC_LIMIT_MACROS
43 #undef __STDC_LIMIT_MACROS
44 #undef _UNDEF__STDC_LIMIT_MACROS
45 #endif
46 #ifdef _UNDEF__STDC_CONSTANT_MACROS
47 #undef __STDC_CONSTANT_MACROS
48 #undef _UNDEF__STDC_CONSTANT_MACROS
49 #endif
50 
51 #endif
52 
53 #if defined(__cplusplus) && __cplusplus >= 201103L
54 
55 #include <atomic>
56 #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
57 
58 #elif defined(__clang__) && (__clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 1)) && __cplusplus >= 201103L
59 
60 #include <atomic>
61 #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
62 
63 #elif defined(_MSC_VER) && (_MSC_VER >= 1900) // 1900 means VC 14.0,2015, there some problem with std::atomic implement in old MSVC
64 
65 #include <atomic>
66 #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
67 
68 // There is a BUG in gcc 4.6, which will cause 'undefined reference to `std::atomic_thread_fence(std::memory_order)'
69 // In gcc 4.7 and upper, we can use -std=c++11 or upper
70 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=51038
71 // #elif defined(__GNUC__) && ((__GNUC__ == 4 && __GNUC_MINOR__ >= 5) || __GNUC__ > 4) && defined(__GXX_EXPERIMENTAL_CXX0X__)
72 //
73 // #include <atomic>
74 // #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
75 
76 #endif
77 
78 #if !defined(__UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD) && defined(_MSC_VER)
80 #endif
81 
82 #include <cstddef>
83 
86 
87 namespace util {
88  namespace lock {
89 #ifdef __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
97 
98 #define UTIL_LOCK_ATOMIC_THREAD_FENCE(order) ::std::atomic_thread_fence(order)
99 #define UTIL_LOCK_ATOMIC_SIGNAL_FENCE(order) ::std::atomic_signal_fence(order)
100 
107  template <typename Ty = int>
108  class atomic_int_type {
109  public:
110  typedef Ty value_type;
111 
112  private:
113  ::std::atomic<value_type> data_;
114  atomic_int_type(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
115 #ifndef _MSC_VER
116  atomic_int_type &operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
117  atomic_int_type &operator=(const atomic_int_type &) volatile UTIL_CONFIG_DELETED_FUNCTION;
118 #endif
119 
120 
121  public:
122  atomic_int_type() UTIL_CONFIG_NOEXCEPT : data_() {}
123  atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT : data_(desired) {}
124 
125  inline void store(value_type desired,
126  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
127  data_.store(desired, order);
128  }
129  inline void store(value_type desired,
130  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
131  data_.store(desired, order);
132  }
133 
134  inline value_type load(::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT {
135  return data_.load(order);
136  }
137  inline value_type load(::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) const
138  volatile UTIL_CONFIG_NOEXCEPT {
139  return data_.load(order);
140  }
141 
142  inline operator value_type() const UTIL_CONFIG_NOEXCEPT { return load(); }
143  inline operator value_type() const volatile UTIL_CONFIG_NOEXCEPT { return load(); }
144 
145  inline value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT {
146  store(desired);
147  return desired;
148  }
149  inline value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT {
150  store(desired);
151  return desired;
152  }
153 
154  inline value_type operator++() UTIL_CONFIG_NOEXCEPT { return ++data_; }
155  inline value_type operator++() volatile UTIL_CONFIG_NOEXCEPT { return ++data_; }
156  inline value_type operator++(int) UTIL_CONFIG_NOEXCEPT { return data_++; }
157  inline value_type operator++(int) volatile UTIL_CONFIG_NOEXCEPT { return data_++; }
158  inline value_type operator--() UTIL_CONFIG_NOEXCEPT { return --data_; }
159  inline value_type operator--() volatile UTIL_CONFIG_NOEXCEPT { return --data_; }
160  inline value_type operator--(int) UTIL_CONFIG_NOEXCEPT { return data_--; }
161  inline value_type operator--(int) volatile UTIL_CONFIG_NOEXCEPT { return data_--; }
162 
163  inline value_type exchange(value_type desired,
164  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
165  return data_.exchange(desired, order);
166  }
167  inline value_type
168  exchange(value_type desired,
169  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
170  return data_.exchange(desired, order);
171  }
172 
173  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
174  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
175  return data_.compare_exchange_weak(expected, desired, success, failure);
176  }
177  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
178  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
179  return data_.compare_exchange_weak(expected, desired, success, failure);
180  }
181 
182  inline bool compare_exchange_weak(value_type &expected, value_type desired,
183  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
184  return data_.compare_exchange_weak(expected, desired, order);
185  }
186  inline bool
187  compare_exchange_weak(value_type &expected, value_type desired,
188  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
189  return data_.compare_exchange_weak(expected, desired, order);
190  }
191 
192  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
193  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
194  return data_.compare_exchange_strong(expected, desired, success, failure);
195  }
196  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
197  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
198  return data_.compare_exchange_strong(expected, desired, success, failure);
199  }
200 
201  inline bool
202  compare_exchange_strong(value_type &expected, value_type desired,
203  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
204  return data_.compare_exchange_strong(expected, desired, order);
205  }
206  inline bool
207  compare_exchange_strong(value_type &expected, value_type desired,
208  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
209  return data_.compare_exchange_strong(expected, desired, order);
210  }
211 
212  inline value_type fetch_add(value_type arg,
213  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
214  return data_.fetch_add(arg, order);
215  }
216  inline value_type
217  fetch_add(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
218  return data_.fetch_add(arg, order);
219  }
220 
221  inline value_type fetch_sub(value_type arg,
222  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
223  return data_.fetch_sub(arg, order);
224  }
225  inline value_type
226  fetch_sub(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
227  return data_.fetch_sub(arg, order);
228  }
229 
230  inline value_type fetch_and(value_type arg,
231  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
232  return data_.fetch_and(arg, order);
233  }
234  inline value_type
235  fetch_and(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
236  return data_.fetch_and(arg, order);
237  }
238 
239  inline value_type fetch_or(value_type arg,
240  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
241  return data_.fetch_or(arg, order);
242  }
243  inline value_type
244  fetch_or(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
245  return data_.fetch_or(arg, order);
246  }
247 
248  inline value_type fetch_xor(value_type arg,
249  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
250  return data_.fetch_xor(arg, order);
251  }
252  inline value_type
253  fetch_xor(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
254  return data_.fetch_xor(arg, order);
255  }
256  };
257 #else
258 
259 #if defined(__clang__)
260 
261 #if !defined(__GCC_ATOMIC_INT_LOCK_FREE) && (!defined(__GNUC__) || __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1))
262 #error clang version is too old
263 #endif
264 
265 #if defined(__GCC_ATOMIC_INT_LOCK_FREE)
266 // @see https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
267 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC 1
268 #else
269 // @see https://gcc.gnu.org/onlinedocs/gcc-4.1.2/gcc/Atomic-Builtins.html
270 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC 1
271 #endif
272 
273 #elif defined(_MSC_VER)
274 
275 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC 1
276 
277 
278 #elif defined(__GNUC__) || defined(__INTEL_COMPILER)
279 
280 #if defined(__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1))
281 #error gcc version must be greater or equal than 4.1
282 #endif
283 
284 #if defined(__INTEL_COMPILER) && __INTEL_COMPILER < 1100
285 #error intel compiler version must be greater or equal than 11.0
286 #endif
287 
288 #if defined(__GCC_ATOMIC_INT_LOCK_FREE)
289 // @see https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
290 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC 1
291 #else
292 // @see https://gcc.gnu.org/onlinedocs/gcc-4.1.2/gcc/Atomic-Builtins.html
293 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC 1
294 #endif
295 
296 #else
297 
298 #error currently only gcc, msvc, intel compiler & llvm-clang are supported
299 
300 #endif
301 
302 #if defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
 // Memory-order constants for the GCC/clang __atomic-builtin branch.
 // Each enumerator maps directly onto the compiler's __ATOMIC_* macro, so a
 // ::util::lock::memory_order value can be passed straight through to the
 // __atomic_* intrinsics without translation.
 303  enum memory_order {
 304  memory_order_relaxed = __ATOMIC_RELAXED,
 305  memory_order_consume = __ATOMIC_CONSUME,
 306  memory_order_acquire = __ATOMIC_ACQUIRE,
 307  memory_order_release = __ATOMIC_RELEASE,
 308  memory_order_acq_rel = __ATOMIC_ACQ_REL,
 309  memory_order_seq_cst = __ATOMIC_SEQ_CST
 310  };
311 
312 #define UTIL_LOCK_ATOMIC_THREAD_FENCE(order) __atomic_thread_fence(order)
313 #define UTIL_LOCK_ATOMIC_SIGNAL_FENCE(order) __atomic_signal_fence(order)
314 
315 #elif !defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC) // old gcc and old msvc use this
323  };
324 #endif
325 
326 #ifndef UTIL_LOCK_ATOMIC_THREAD_FENCE
327 #define UTIL_LOCK_ATOMIC_THREAD_FENCE(x)
328 #endif
329 
330 #ifndef UTIL_LOCK_ATOMIC_SIGNAL_FENCE
331 #define UTIL_LOCK_ATOMIC_SIGNAL_FENCE(x)
332 #endif
333 
334  template <typename Ty = int>
336  public:
337  typedef Ty value_type;
338 
339  private:
340 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
341  // char has no cas api in msvc
343 #else
344  volatile value_type data_;
345 #endif
346  atomic_int_type(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
347 #ifndef _MSC_VER
348  atomic_int_type &operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
349  atomic_int_type &operator=(const atomic_int_type &) volatile UTIL_CONFIG_DELETED_FUNCTION;
350 #endif
351 
352  public:
353  atomic_int_type() UTIL_CONFIG_NOEXCEPT : data_() {
354 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
355 #if __cplusplus >= 201703L
356  if
357  constexpr(sizeof(data_) != sizeof(value_type)) {
358 #else
359  if (sizeof(data_) != sizeof(value_type)) {
360 #endif
361  data_ = static_cast<value_type>(data_);
362  }
363 #endif
364  }
365 
366  atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT : data_(desired) {}
367 
368  inline void
369  store(value_type desired,
371 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
373  typedef typename int_opr_t::opr_t opr_t;
374  int_opr_t::exchange(&data_, static_cast<opr_t>(desired), order);
375 
376 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
377  __atomic_store_n(&data_, desired, order);
378 #else
379  __sync_lock_test_and_set(&data_, desired);
380 #endif
381  }
382 
383  inline void store(value_type desired, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
384  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
385 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
387  typedef typename int_opr_t::opr_t opr_t;
388  int_opr_t::exchange(&data_, static_cast<opr_t>(desired), order);
389 
390 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
391  __atomic_store_n(&data_, desired, order);
392 #else
393  __sync_lock_test_and_set(&data_, desired);
394 #endif
395  }
396 
397  inline value_type
399 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
401  typedef typename int_opr_t::opr_t opr_t;
402  return static_cast<value_type>(int_opr_t:: or (const_cast<opr_t *>(&data_), static_cast<opr_t>(0), order));
403 
404 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
405  return __atomic_load_n(&data_, order);
406 #else
407  __sync_synchronize();
408  return data_;
409 #endif
410  }
411 
413  volatile UTIL_CONFIG_NOEXCEPT {
414 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
416  typedef typename int_opr_t::opr_t opr_t;
417  return static_cast<value_type>(int_opr_t:: or (const_cast<opr_t *>(&data_), static_cast<opr_t>(0), order));
418 
419 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
420  return __atomic_load_n(&data_, order);
421 #else
422  __sync_synchronize();
423  return data_;
424 #endif
425  }
426 
 // Implicit conversion: equivalent to load() with its default memory order.
 427  inline operator value_type() const UTIL_CONFIG_NOEXCEPT { return load(); }
 428  inline operator value_type() const volatile UTIL_CONFIG_NOEXCEPT { return load(); }
 429 
 // Assignment: store() with its default memory order, returning the value
 // that was written (mirrors std::atomic::operator=).
 430  inline value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT {
 431  store(desired);
 432  return desired;
 433  }
 434  inline value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT {
 435  store(desired);
 436  return desired;
 437  }
438 
439  inline value_type operator++() UTIL_CONFIG_NOEXCEPT {
440 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
442  typedef typename int_opr_t::opr_t opr_t;
443  return static_cast<value_type>(int_opr_t::inc(&data_, ::util::lock::memory_order_seq_cst));
444 #else
445  return fetch_add(1) + 1;
446 #endif
447  }
448  inline value_type operator++() volatile UTIL_CONFIG_NOEXCEPT {
449 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
451  typedef typename int_opr_t::opr_t opr_t;
452  return static_cast<value_type>(int_opr_t::inc(&data_, ::util::lock::memory_order_seq_cst));
453 #else
454  return fetch_add(1) + 1;
455 #endif
456  }
457  inline value_type operator++(int) UTIL_CONFIG_NOEXCEPT {
458 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
460  typedef typename int_opr_t::opr_t opr_t;
461  return static_cast<value_type>(int_opr_t::inc(&data_, ::util::lock::memory_order_seq_cst) - 1);
462 #else
463  return fetch_add(1);
464 #endif
465  }
466  inline value_type operator++(int) volatile UTIL_CONFIG_NOEXCEPT {
467 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
469  typedef typename int_opr_t::opr_t opr_t;
470  return static_cast<value_type>(int_opr_t::inc(&data_, ::util::lock::memory_order_seq_cst) - 1);
471 #else
472  return fetch_add(1);
473 #endif
474  }
475  inline value_type operator--() UTIL_CONFIG_NOEXCEPT {
476 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
478  typedef typename int_opr_t::opr_t opr_t;
479  return static_cast<value_type>(int_opr_t::dec(&data_, ::util::lock::memory_order_seq_cst));
480 #else
481  return fetch_sub(1) - 1;
482 #endif
483  }
484  inline value_type operator--() volatile UTIL_CONFIG_NOEXCEPT {
485 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
487  typedef typename int_opr_t::opr_t opr_t;
488  return static_cast<value_type>(int_opr_t::dec(&data_, ::util::lock::memory_order_seq_cst));
489 #else
490  return fetch_sub(1) - 1;
491 #endif
492  }
493  inline value_type operator--(int) UTIL_CONFIG_NOEXCEPT {
494 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
496  typedef typename int_opr_t::opr_t opr_t;
497  return static_cast<value_type>(int_opr_t::dec(&data_, ::util::lock::memory_order_seq_cst) + 1);
498 #else
499  return fetch_sub(1);
500 #endif
501  }
502  inline value_type operator--(int) volatile UTIL_CONFIG_NOEXCEPT {
503 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
505  typedef typename int_opr_t::opr_t opr_t;
506  return static_cast<value_type>(int_opr_t::dec(&data_, ::util::lock::memory_order_seq_cst) + 1);
507 #else
508  return fetch_sub(1);
509 #endif
510  }
511 
512  inline value_type
513  exchange(value_type desired,
515 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
517  typedef typename int_opr_t::opr_t opr_t;
518  return static_cast<value_type>(static_cast<value_type>(int_opr_t::exchange(&data_, static_cast<opr_t>(desired), order)));
519 
520 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
521  return __atomic_exchange_n(&data_, desired, order);
522 #else
523  value_type old_value = data_;
524  while (!__sync_bool_compare_and_swap(&data_, old_value, desired)) {
525  old_value = data_;
526  }
527  return old_value;
528 #endif
529  }
530 
531  inline value_type exchange(value_type desired, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
532  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
533 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
535  typedef typename int_opr_t::opr_t opr_t;
536  return static_cast<value_type>(static_cast<value_type>(int_opr_t::exchange(&data_, static_cast<opr_t>(desired), order)));
537 
538 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
539  return __atomic_exchange_n(&data_, desired, order);
540 #else
541  value_type old_value = data_;
542  while (!__sync_bool_compare_and_swap(&data_, old_value, desired)) {
543  old_value = data_;
544  }
545  return old_value;
546 #endif
547  }
548 
549  inline bool compare_exchange_weak(value_type &expected, value_type desired,
551  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
552 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
554  typedef typename int_opr_t::opr_t opr_t;
555  if (expected ==
556  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), success))) {
557  return true;
558  } else {
559  expected = static_cast<value_type>(data_);
560  return false;
561  }
562 
563 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
564  return __atomic_compare_exchange_n(&data_, &expected, desired, true, success, failure);
565 #else
566  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
567  return true;
568  } else {
569  expected = data_;
570  return false;
571  }
572 #endif
573  }
574 
575  inline bool compare_exchange_weak(value_type &expected, value_type desired,
577  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
578 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
580  typedef typename int_opr_t::opr_t opr_t;
581  if (expected ==
582  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), success))) {
583  return true;
584  } else {
585  expected = static_cast<value_type>(data_);
586  return false;
587  }
588 
589 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
590  return __atomic_compare_exchange_n(&data_, &expected, desired, true, success, failure);
591 #else
592  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
593  return true;
594  } else {
595  expected = data_;
596  return false;
597  }
598 #endif
599  }
600 
601  inline bool compare_exchange_weak(value_type &expected, value_type desired,
603  UTIL_CONFIG_NOEXCEPT {
604 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
606  typedef typename int_opr_t::opr_t opr_t;
607  if (expected ==
608  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), order))) {
609  return true;
610  } else {
611  expected = static_cast<value_type>(data_);
612  return false;
613  }
614 
615 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
616  return __atomic_compare_exchange_n(&data_, &expected, desired, true, order, order);
617 #else
618  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
619  return true;
620  } else {
621  expected = data_;
622  return false;
623  }
624 #endif
625  }
626 
628  value_type &expected, value_type desired,
630 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
632  typedef typename int_opr_t::opr_t opr_t;
633  if (expected ==
634  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), order))) {
635  return true;
636  } else {
637  expected = data_;
638  return false;
639  }
640 
641 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
642  return __atomic_compare_exchange_n(&data_, &expected, desired, true, order, order);
643 #else
644  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
645  return true;
646  } else {
647  expected = data_;
648  return false;
649  }
650 #endif
651  }
652 
653  inline bool compare_exchange_strong(value_type &expected, value_type desired,
655  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
656 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
658  typedef typename int_opr_t::opr_t opr_t;
659  if (expected ==
660  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), success))) {
661  return true;
662  } else {
663  expected = data_;
664  return false;
665  }
666 
667 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
668  return __atomic_compare_exchange_n(&data_, &expected, desired, false, success, failure);
669 #else
670  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
671  return true;
672  } else {
673  expected = data_;
674  return false;
675  }
676 #endif
677  }
678 
679  inline bool compare_exchange_strong(value_type &expected, value_type desired,
681  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
682 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
684  typedef typename int_opr_t::opr_t opr_t;
685  if (expected ==
686  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), success))) {
687  return true;
688  } else {
689  expected = data_;
690  return false;
691  }
692 
693 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
694  return __atomic_compare_exchange_n(&data_, &expected, desired, false, success, failure);
695 #else
696  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
697  return true;
698  } else {
699  expected = data_;
700  return false;
701  }
702 #endif
703  }
704 
705  inline bool compare_exchange_strong(value_type &expected, value_type desired,
707  UTIL_CONFIG_NOEXCEPT {
708 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
710  typedef typename int_opr_t::opr_t opr_t;
711  if (expected ==
712  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), order))) {
713  return true;
714  } else {
715  expected = static_cast<value_type>(data_);
716  return false;
717  }
718 
719 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
720  return __atomic_compare_exchange_n(&data_, &expected, desired, false, order, order);
721 #else
722  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
723  return true;
724  } else {
725  expected = data_;
726  return false;
727  }
728 #endif
729  }
730 
732  value_type &expected, value_type desired,
734 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
736  typedef typename int_opr_t::opr_t opr_t;
737  if (expected ==
738  static_cast<value_type>(int_opr_t::cas(&data_, static_cast<opr_t>(desired), static_cast<opr_t>(expected), order))) {
739  return true;
740  } else {
741  expected = static_cast<value_type>(data_);
742  return false;
743  }
744 
745 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
746  return __atomic_compare_exchange_n(&data_, &expected, desired, false, order, order);
747 #else
748  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
749  return true;
750  } else {
751  expected = data_;
752  return false;
753  }
754 #endif
755  }
756 
757  inline value_type
758  fetch_add(value_type arg,
760 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
762  typedef typename int_opr_t::opr_t opr_t;
763  return static_cast<value_type>(int_opr_t::add(&data_, static_cast<opr_t>(arg), order)) - arg;
764 
765 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
766  return __atomic_fetch_add(&data_, arg, order);
767 #else
768  return __sync_fetch_and_add(&data_, arg);
769 #endif
770  }
771  inline value_type fetch_add(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
772  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
773 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
775  typedef typename int_opr_t::opr_t opr_t;
776  return static_cast<value_type>(int_opr_t::add(&data_, static_cast<opr_t>(arg), order)) - arg;
777 
778 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
779  return __atomic_fetch_add(&data_, arg, order);
780 #else
781  return __sync_fetch_and_add(&data_, arg);
782 #endif
783  }
784 
785  inline value_type
786  fetch_sub(value_type arg,
788 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
790  typedef typename int_opr_t::opr_t opr_t;
791  return static_cast<value_type>(int_opr_t::sub(&data_, static_cast<opr_t>(arg), order)) + arg;
792 
793 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
794  return __atomic_fetch_sub(&data_, arg, order);
795 #else
796  return __sync_fetch_and_sub(&data_, arg);
797 #endif
798  }
799  inline value_type fetch_sub(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
800  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
801 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
803  typedef typename int_opr_t::opr_t opr_t;
804  return static_cast<value_type>(int_opr_t::sub(&data_, static_cast<opr_t>(arg), order)) + arg;
805 
806 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
807  return __atomic_fetch_sub(&data_, arg, order);
808 #else
809  return __sync_fetch_and_sub(&data_, arg);
810 #endif
811  }
812 
813  inline value_type
814  fetch_and(value_type arg,
816 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
818  typedef typename int_opr_t::opr_t opr_t;
819  return static_cast<value_type>(int_opr_t::and(&data_, static_cast<opr_t>(arg), order));
820 
821 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
822  return __atomic_fetch_and(&data_, arg, order);
823 #else
824  return __sync_fetch_and_and(&data_, arg);
825 #endif
826  }
827  inline value_type fetch_and(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
828  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
829 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
831  typedef typename int_opr_t::opr_t opr_t;
832  return static_cast<value_type>(int_opr_t::and(&data_, static_cast<opr_t>(arg), order));
833 
834 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
835  return __atomic_fetch_and(&data_, arg, order);
836 #else
837  return __sync_fetch_and_and(&data_, arg);
838 #endif
839  }
840 
841  inline value_type
842  fetch_or(value_type arg,
844 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
846  typedef typename int_opr_t::opr_t opr_t;
847  return static_cast<value_type>(int_opr_t:: or (&data_, static_cast<opr_t>(arg), order));
848 
849 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
850  return __atomic_fetch_or(&data_, arg, order);
851 #else
852  return __sync_fetch_and_or(&data_, arg);
853 #endif
854  }
855  inline value_type fetch_or(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
856  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
857 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
859  typedef typename int_opr_t::opr_t opr_t;
860  return static_cast<value_type>(int_opr_t:: or (&data_, static_cast<opr_t>(arg), order));
861 
862 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
863  return __atomic_fetch_or(&data_, arg, order);
864 #else
865  return __sync_fetch_and_or(&data_, arg);
866 #endif
867  }
868 
869  inline value_type
870  fetch_xor(value_type arg,
872 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
874  typedef typename int_opr_t::opr_t opr_t;
875  return static_cast<value_type>(int_opr_t:: xor (&data_, static_cast<opr_t>(arg), order));
876 
877 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
878  return __atomic_fetch_xor(&data_, arg, order);
879 #else
880  return __sync_fetch_and_xor(&data_, arg);
881 #endif
882  }
883  inline value_type fetch_xor(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
884  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
885 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
887  typedef typename int_opr_t::opr_t opr_t;
888  return static_cast<value_type>(int_opr_t:: xor (&data_, static_cast<opr_t>(arg), order));
889 
890 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
891  return __atomic_fetch_xor(&data_, arg, order);
892 #else
893  return __sync_fetch_and_xor(&data_, arg);
894 #endif
895  }
896  };
897 
898 #endif
899 
900  // used for unsafe (not multi-thread safe)
901  template <typename Ty = int>
903  typedef Ty value_type;
904  };
905 
906  template <typename Ty>
908  public:
910 
911  private:
912  value_type data_;
913  atomic_int_type(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
914 #ifndef _MSC_VER
915  atomic_int_type &operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
916  atomic_int_type &operator=(const atomic_int_type &) volatile UTIL_CONFIG_DELETED_FUNCTION;
917 #endif
918 
919  public:
920  atomic_int_type() : data_() {}
921  atomic_int_type(value_type desired) : data_(desired) {}
922 
923  inline void
924  store(value_type desired,
926  data_ = desired;
927  }
928  inline void store(value_type desired, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
929  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
930  data_ = desired;
931  }
932 
933  inline value_type
935  return data_;
936  }
938  volatile UTIL_CONFIG_NOEXCEPT {
939  return data_;
940  }
941 
942  inline operator value_type() const UTIL_CONFIG_NOEXCEPT { return load(); }
943  inline operator value_type() const volatile UTIL_CONFIG_NOEXCEPT { return load(); }
944 
945  inline value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT {
946  store(desired);
947  return desired;
948  }
949  inline value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT {
950  store(desired);
951  return desired;
952  }
953 
954  inline value_type operator++() UTIL_CONFIG_NOEXCEPT { return ++data_; }
955  inline value_type operator++() volatile UTIL_CONFIG_NOEXCEPT { return ++data_; }
956  inline value_type operator++(int) UTIL_CONFIG_NOEXCEPT { return data_++; }
957  inline value_type operator++(int) volatile UTIL_CONFIG_NOEXCEPT { return data_++; }
958  inline value_type operator--() UTIL_CONFIG_NOEXCEPT { return --data_; }
959  inline value_type operator--() volatile UTIL_CONFIG_NOEXCEPT { return --data_; }
960  inline value_type operator--(int) UTIL_CONFIG_NOEXCEPT { return data_--; }
961  inline value_type operator--(int) volatile UTIL_CONFIG_NOEXCEPT { return data_--; }
962 
963  inline value_type
964  exchange(value_type desired,
966  value_type ret = data_;
967  data_ = desired;
968  return ret;
969  }
970  inline value_type exchange(value_type desired, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
971  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
972  value_type ret = data_;
973  data_ = desired;
974  return ret;
975  }
976 
977  private:
978  inline bool cas(value_type &expected, value_type desired) UTIL_CONFIG_NOEXCEPT {
979  if (likely(data_ == expected)) {
980  data_ = desired;
981  return true;
982  } else {
983  expected = data_;
984  return false;
985  }
986  }
987 
988  public:
989  inline bool compare_exchange_weak(value_type &expected, value_type desired,
991  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
992  return cas(expected, desired);
993  }
994  inline bool compare_exchange_weak(value_type &expected, value_type desired,
996  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
997  return cas(expected, desired);
998  }
999 
1000  inline bool compare_exchange_weak(value_type &expected, value_type desired,
1002  UTIL_CONFIG_NOEXCEPT {
1003  return cas(expected, desired);
1004  }
1006  value_type &expected, value_type desired,
1008  return cas(expected, desired);
1009  }
1010 
1011  inline bool compare_exchange_strong(value_type &expected, value_type desired,
1013  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
1014  return cas(expected, desired);
1015  }
1016  inline bool compare_exchange_strong(value_type &expected, value_type desired,
1018  EXPLICIT_UNUSED_ATTR ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
1019  return cas(expected, desired);
1020  }
1021 
1022  inline bool compare_exchange_strong(value_type &expected, value_type desired,
1024  UTIL_CONFIG_NOEXCEPT {
1025  return cas(expected, desired);
1026  }
1028  value_type &expected, value_type desired,
1030  return cas(expected, desired);
1031  }
1032 
1033  inline value_type
1034  fetch_add(value_type arg,
1036  value_type ret = data_;
1037  data_ += arg;
1038  return ret;
1039  }
1040  inline value_type fetch_add(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
1041  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
1042  value_type ret = data_;
1043  data_ += arg;
1044  return ret;
1045  }
1046 
1047  inline value_type
1048  fetch_sub(value_type arg,
1050  value_type ret = data_;
1051  data_ -= arg;
1052  return ret;
1053  }
1054  inline value_type fetch_sub(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
1055  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
1056  value_type ret = data_;
1057  data_ -= arg;
1058  return ret;
1059  }
1060 
1061  inline value_type
1062  fetch_and(value_type arg,
1064  value_type ret = data_;
1065  data_ &= arg;
1066  return ret;
1067  }
1068  inline value_type fetch_and(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
1069  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
1070  value_type ret = data_;
1071  data_ &= arg;
1072  return ret;
1073  }
1074 
1075  inline value_type
1076  fetch_or(value_type arg,
1078  value_type ret = data_;
1079  data_ |= arg;
1080  return ret;
1081  }
1082  inline value_type fetch_or(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
1083  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
1084  value_type ret = data_;
1085  data_ |= arg;
1086  return ret;
1087  }
1088 
1089  inline value_type
1090  fetch_xor(value_type arg,
1092  value_type ret = data_;
1093  data_ ^= arg;
1094  return ret;
1095  }
1096  inline value_type fetch_xor(value_type arg, EXPLICIT_UNUSED_ATTR ::util::lock::memory_order order =
1097  ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
1098  value_type ret = data_;
1099  data_ ^= arg;
1100  return ret;
1101  }
1102  };
1103  } // namespace lock
1104 } // namespace util
1105 
1106 #endif /* _UTIL_LOCK_ATOMIC_INT_TYPE_H_ */
value_type operator++(int) UTIL_CONFIG_NOEXCEPT
value_type load(EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const volatile UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_add(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
void store(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
Imports the declaration/attribute helper macros (explicit-declare constraints). Licensed under the MIT licenses.
void store(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type operator--() volatile UTIL_CONFIG_NOEXCEPT
value_type operator++(int) volatile UTIL_CONFIG_NOEXCEPT
value_type operator--() UTIL_CONFIG_NOEXCEPT
value_type operator--(int) UTIL_CONFIG_NOEXCEPT
value_type operator--(int) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
value_type operator++(int) volatile UTIL_CONFIG_NOEXCEPT
value_type operator++(int) UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
atomic_int_type() UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
#define likely(x)
value_type fetch_add(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT
bool cas(value_type &expected, value_type desired) UTIL_CONFIG_NOEXCEPT
void store(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
volatile value_type data_
atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
atomic_int_type & operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION
value_type operator++() volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type load(EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type operator--(int) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_add(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
value_type operator--() volatile UTIL_CONFIG_NOEXCEPT
value_type operator++() UTIL_CONFIG_NOEXCEPT
value_type fetch_add(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type operator++() volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type load(EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
Atomic operations for integer types — unified interface for MSVC. Licensed under the MIT licenses.
value_type operator--(int) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type load(EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order success, EXPLICIT_UNUSED_ATTR::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
void store(value_type desired, EXPLICIT_UNUSED_ATTR::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT