libcopp  1.1.0
atomic_int_type.h
Go to the documentation of this file.
1 
18 #ifndef UTIL_LOCK_ATOMIC_INT_TYPE_H
19 #define UTIL_LOCK_ATOMIC_INT_TYPE_H
20 
21 #pragma once
22 
23 #if defined(__cplusplus) && __cplusplus >= 201103L
24 
25 #include <atomic>
26 #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
27 
28 #elif defined(__clang__) && (__clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 1)) && __cplusplus >= 201103L
29 
30 #include <atomic>
31 #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
32 
33 #elif defined(_MSC_VER) && (_MSC_VER > 1700 || (defined(_HAS_CPP0X) && _HAS_CPP0X)) // VC 11,2012
34 
35 #include <atomic>
36 #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
37 
38 // There is a BUG in gcc 4.6, which will cause 'undefined reference to `std::atomic_thread_fence(std::memory_order)'
39 // In gcc 4.7 and upper, we can use -std=c++11 or upper
40 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=51038
41 // #elif defined(__GNUC__) && ((__GNUC__ == 4 && __GNUC_MINOR__ >= 5) || __GNUC__ > 4) && defined(__GXX_EXPERIMENTAL_CXX0X__)
42 //
43 // #include <atomic>
44 // #define __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
45 
46 #endif
47 
48 #if !defined(__UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD) && defined(_MSC_VER)
50 #endif
51 
52 #include <cstddef>
53 
55 #include <libcopp/utils/features.h>
56 
57 
58 namespace util {
59  namespace lock {
60 #ifdef __UTIL_LOCK_ATOMIC_INT_TYPE_ATOMIC_STD
68 
69 #define UTIL_LOCK_ATOMIC_THREAD_FENCE(order) ::std::atomic_thread_fence(order)
70 #define UTIL_LOCK_ATOMIC_SIGNAL_FENCE(order) ::std::atomic_signal_fence(order)
71 
78  template <typename Ty = int>
79  class atomic_int_type {
80  public:
81  typedef Ty value_type;
82 
83  private:
84  ::std::atomic<value_type> data_;
85  atomic_int_type(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
86 #ifndef _MSC_VER
87  atomic_int_type &operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
88  atomic_int_type &operator=(const atomic_int_type &) volatile UTIL_CONFIG_DELETED_FUNCTION;
89 #endif
90 
91  public:
92  atomic_int_type() UTIL_CONFIG_NOEXCEPT : data_() {}
93  atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT : data_(desired) {}
94 
95  inline void store(value_type desired,
96  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
97  data_.store(desired, order);
98  }
99  inline void store(value_type desired,
100  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
101  data_.store(desired, order);
102  }
103 
104  inline value_type load(::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT {
105  return data_.load(order);
106  }
107  inline value_type load(::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) const
108  volatile UTIL_CONFIG_NOEXCEPT {
109  return data_.load(order);
110  }
111 
112  inline operator value_type() const UTIL_CONFIG_NOEXCEPT { return load(); }
113  inline operator value_type() const volatile UTIL_CONFIG_NOEXCEPT { return load(); }
114 
115  inline value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT { store(desired); }
116  inline value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT { store(desired); }
117 
118  inline value_type operator++() UTIL_CONFIG_NOEXCEPT { return ++data_; }
119  inline value_type operator++() volatile UTIL_CONFIG_NOEXCEPT { return ++data_; }
120  inline value_type operator++(int)UTIL_CONFIG_NOEXCEPT { return data_++; }
121  inline value_type operator++(int)volatile UTIL_CONFIG_NOEXCEPT { return data_++; }
122  inline value_type operator--() UTIL_CONFIG_NOEXCEPT { return --data_; }
123  inline value_type operator--() volatile UTIL_CONFIG_NOEXCEPT { return --data_; }
124  inline value_type operator--(int)UTIL_CONFIG_NOEXCEPT { return data_--; }
125  inline value_type operator--(int)volatile UTIL_CONFIG_NOEXCEPT { return data_--; }
126 
127  inline value_type exchange(value_type desired,
128  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
129  return data_.exchange(desired, order);
130  }
131  inline value_type
132  exchange(value_type desired,
133  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
134  return data_.exchange(desired, order);
135  }
136 
137  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
138  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
139  return data_.compare_exchange_weak(expected, desired, success, failure);
140  }
141  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
142  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
143  return data_.compare_exchange_weak(expected, desired, success, failure);
144  }
145 
146  inline bool compare_exchange_weak(value_type &expected, value_type desired,
147  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
148  return data_.compare_exchange_weak(expected, desired, order);
149  }
150  inline bool
151  compare_exchange_weak(value_type &expected, value_type desired,
152  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
153  return data_.compare_exchange_weak(expected, desired, order);
154  }
155 
156  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
157  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
158  return data_.compare_exchange_strong(expected, desired, success, failure);
159  }
160  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
161  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
162  return data_.compare_exchange_strong(expected, desired, success, failure);
163  }
164 
165  inline bool
166  compare_exchange_strong(value_type &expected, value_type desired,
167  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
168  return data_.compare_exchange_strong(expected, desired, order);
169  }
170  inline bool
171  compare_exchange_strong(value_type &expected, value_type desired,
172  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
173  return data_.compare_exchange_strong(expected, desired, order);
174  }
175 
176  inline value_type fetch_add(value_type arg,
177  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
178  return data_.fetch_add(arg, order);
179  }
180  inline value_type
181  fetch_add(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
182  return data_.fetch_add(arg, order);
183  }
184 
185  inline value_type fetch_sub(value_type arg,
186  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
187  return data_.fetch_sub(arg, order);
188  }
189  inline value_type
190  fetch_sub(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
191  return data_.fetch_sub(arg, order);
192  }
193 
194  inline value_type fetch_and(value_type arg,
195  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
196  return data_.fetch_and(arg, order);
197  }
198  inline value_type
199  fetch_and(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
200  return data_.fetch_and(arg, order);
201  }
202 
203  inline value_type fetch_or(value_type arg,
204  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
205  return data_.fetch_or(arg, order);
206  }
207  inline value_type
208  fetch_or(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
209  return data_.fetch_or(arg, order);
210  }
211 
212  inline value_type fetch_xor(value_type arg,
213  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
214  return data_.fetch_xor(arg, order);
215  }
216  inline value_type
217  fetch_xor(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
218  return data_.fetch_xor(arg, order);
219  }
220  };
221 #else
222 
223 #if defined(__clang__)
224 
225 #if !defined(__GCC_ATOMIC_INT_LOCK_FREE) && (!defined(__GNUC__) || __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1))
226 #error clang version is too old
227 #endif
228 
229 #if defined(__GCC_ATOMIC_INT_LOCK_FREE)
230 // @see https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
231 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC 1
232 #else
233 // @see https://gcc.gnu.org/onlinedocs/gcc-4.1.2/gcc/Atomic-Builtins.html
234 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC 1
235 #endif
236 
237 #elif defined(_MSC_VER)
238 
239 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC 1
240 
241 
242 #elif defined(__GNUC__) || defined(__INTEL_COMPILER)
243 
244 #if defined(__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1))
245 #error gcc version must be greater or equal than 4.1
246 #endif
247 
248 #if defined(__INTEL_COMPILER) && __INTEL_COMPILER < 1100
249 #error intel compiler version must be greater or equal than 11.0
250 #endif
251 
252 #if defined(__GCC_ATOMIC_INT_LOCK_FREE)
253 // @see https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
254 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC 1
255 #else
256 // @see https://gcc.gnu.org/onlinedocs/gcc-4.1.2/gcc/Atomic-Builtins.html
257 #define __UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC 1
258 #endif
259 
260 #else
261 
262 #error currently only gcc, msvc, intel compiler & llvm-clang are supported
263 
264 #endif
265 
266 #if defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
/**
 * @brief Memory-ordering constants for the intrinsic-based implementation,
 *        mapped one-to-one onto the GCC/clang __atomic builtin ordering
 *        macros (mirrors the std::memory_order enumerators).
 */
enum memory_order {
    memory_order_relaxed = __ATOMIC_RELAXED,
    memory_order_consume = __ATOMIC_CONSUME,
    memory_order_acquire = __ATOMIC_ACQUIRE,
    memory_order_release = __ATOMIC_RELEASE,
    memory_order_acq_rel = __ATOMIC_ACQ_REL,
    memory_order_seq_cst = __ATOMIC_SEQ_CST
};
275 
276 #define UTIL_LOCK_ATOMIC_THREAD_FENCE(order) __atomic_thread_fence(order)
277 #define UTIL_LOCK_ATOMIC_SIGNAL_FENCE(order) __atomic_signal_fence(order)
278 
279 #elif !defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC) // old gcc and old msvc use this
287  };
288 #endif
289 
290 #ifndef UTIL_LOCK_ATOMIC_THREAD_FENCE
291 #define UTIL_LOCK_ATOMIC_THREAD_FENCE(x)
292 #endif
293 
294 #ifndef UTIL_LOCK_ATOMIC_SIGNAL_FENCE
295 #define UTIL_LOCK_ATOMIC_SIGNAL_FENCE(x)
296 #endif
297 
298  template <typename Ty = int>
300  public:
301  typedef Ty value_type;
302 
303  private:
304 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
305  // char has no cas api in msvc
306  union {
307  volatile value_type data_;
308  volatile short padding;
309  };
310 #else
311  volatile value_type data_;
312 #endif
313  atomic_int_type(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
314 #ifndef _MSC_VER
315  atomic_int_type &operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
316  atomic_int_type &operator=(const atomic_int_type &) volatile UTIL_CONFIG_DELETED_FUNCTION;
317 #endif
318 
319  public:
320  atomic_int_type() UTIL_CONFIG_NOEXCEPT : data_() {}
321  atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT : data_(desired) {}
322 
323  inline void store(value_type desired,
324  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
325 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
327  typedef typename int_opr_t::opr_t opr_t;
328  int_opr_t::exchange(reinterpret_cast<volatile opr_t *>(&data_), static_cast<opr_t>(desired), order);
329 
330 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
331  __atomic_store_n(&data_, desired, order);
332 #else
333  __sync_lock_test_and_set(&data_, desired);
334 #endif
335  }
336 
337  inline void store(value_type desired,
338  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
339 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
341  typedef typename int_opr_t::opr_t opr_t;
342  int_opr_t::exchange(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired), order);
343 
344 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
345  __atomic_store_n(&data_, desired, order);
346 #else
347  __sync_lock_test_and_set(&data_, desired);
348 #endif
349  }
350 
351  inline value_type load(::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT {
352 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
354  typedef typename int_opr_t::opr_t opr_t;
355  return int_opr_t:: or (const_cast<opr_t *>(reinterpret_cast<volatile const opr_t *>(&data_)), static_cast<opr_t>(0), order);
356 
357 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
358  return __atomic_load_n(&data_, order);
359 #else
360  __sync_synchronize();
361  return data_;
362 #endif
363  }
364 
366  volatile UTIL_CONFIG_NOEXCEPT {
367 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
369  typedef typename int_opr_t::opr_t opr_t;
370  return int_opr_t:: or (const_cast<opr_t *>(reinterpret_cast<volatile const opr_t *>(&data_)), static_cast<opr_t>(0), order);
371 
372 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
373  return __atomic_load_n(&data_, order);
374 #else
375  __sync_synchronize();
376  return data_;
377 #endif
378  }
379 
380  inline operator value_type() const UTIL_CONFIG_NOEXCEPT { return load(); }
381  inline operator value_type() const volatile UTIL_CONFIG_NOEXCEPT { return load(); }
382 
383  inline value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT { store(desired); }
384  inline value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT { store(desired); }
385 
386  inline value_type operator++() UTIL_CONFIG_NOEXCEPT {
387 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
389  typedef typename int_opr_t::opr_t opr_t;
390  return int_opr_t::inc(reinterpret_cast<volatile opr_t *>(&data_), ::util::lock::memory_order_seq_cst);
391 #else
392  return fetch_add(1) + 1;
393 #endif
394  }
395  inline value_type operator++() volatile UTIL_CONFIG_NOEXCEPT {
396 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
398  typedef typename int_opr_t::opr_t opr_t;
399  return int_opr_t::inc(reinterpret_cast<volatile opr_t *>(&data_), ::util::lock::memory_order_seq_cst);
400 #else
401  return fetch_add(1) + 1;
402 #endif
403  }
404  inline value_type operator++(int)UTIL_CONFIG_NOEXCEPT {
405 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
407  typedef typename int_opr_t::opr_t opr_t;
408  return int_opr_t::inc(reinterpret_cast<volatile opr_t *>(&data_), ::util::lock::memory_order_seq_cst) - 1;
409 #else
410  return fetch_add(1);
411 #endif
412  }
413  inline value_type operator++(int)volatile UTIL_CONFIG_NOEXCEPT {
414 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
416  typedef typename int_opr_t::opr_t opr_t;
417  return int_opr_t::inc(reinterpret_cast<volatile opr_t *>(&data_), ::util::lock::memory_order_seq_cst) - 1;
418 #else
419  return fetch_add(1);
420 #endif
421  }
422  inline value_type operator--() UTIL_CONFIG_NOEXCEPT {
423 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
425  typedef typename int_opr_t::opr_t opr_t;
426  return int_opr_t::dec(reinterpret_cast<volatile opr_t *>(&data_), ::util::lock::memory_order_seq_cst);
427 #else
428  return fetch_sub(1) - 1;
429 #endif
430  }
431  inline value_type operator--() volatile UTIL_CONFIG_NOEXCEPT {
432 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
434  typedef typename int_opr_t::opr_t opr_t;
435  return int_opr_t::dec(reinterpret_cast<volatile opr_t *>(&data_), ::util::lock::memory_order_seq_cst);
436 #else
437  return fetch_sub(1) - 1;
438 #endif
439  }
440  inline value_type operator--(int)UTIL_CONFIG_NOEXCEPT {
441 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
443  typedef typename int_opr_t::opr_t opr_t;
444  return int_opr_t::dec(reinterpret_cast<volatile opr_t *>(&data_), ::util::lock::memory_order_seq_cst) + 1;
445 #else
446  return fetch_sub(1);
447 #endif
448  }
449  inline value_type operator--(int)volatile UTIL_CONFIG_NOEXCEPT {
450 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
452  typedef typename int_opr_t::opr_t opr_t;
453  return int_opr_t::dec(static_cast<opr_t *>(&data_), ::util::lock::memory_order_seq_cst) + 1;
454 #else
455  return fetch_sub(1);
456 #endif
457  }
458 
459  inline value_type exchange(value_type desired,
460  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
461 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
463  typedef typename int_opr_t::opr_t opr_t;
464  return static_cast<value_type>(
465  int_opr_t::exchange(reinterpret_cast<volatile opr_t *>(&data_), static_cast<opr_t>(desired), order));
466 
467 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
468  return __atomic_exchange_n(&data_, desired, order);
469 #else
470  value_type old_value = data_;
471  while (!__sync_bool_compare_and_swap(&data_, old_value, desired)) {
472  old_value = data_;
473  }
474  return old_value;
475 #endif
476  }
477 
478  inline value_type
479  exchange(value_type desired,
480  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
481 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
483  typedef typename int_opr_t::opr_t opr_t;
484  return static_cast<value_type>(int_opr_t::exchange(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired), order));
485 
486 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
487  return __atomic_exchange_n(&data_, desired, order);
488 #else
489  value_type old_value = data_;
490  while (!__sync_bool_compare_and_swap(&data_, old_value, desired)) {
491  old_value = data_;
492  }
493  return old_value;
494 #endif
495  }
496 
497  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
498  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
499 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
501  typedef typename int_opr_t::opr_t opr_t;
502  if (expected == static_cast<value_type>(int_opr_t::cas(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired),
503  static_cast<opr_t>(expected), success))) {
504  return true;
505  } else {
506  expected = data_;
507  return false;
508  }
509 
510 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
511  return __atomic_compare_exchange_n(&data_, &expected, desired, true, success, failure);
512 #else
513  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
514  return true;
515  } else {
516  expected = data_;
517  return false;
518  }
519 #endif
520  }
521 
522  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
523  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
524 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
526  typedef typename int_opr_t::opr_t opr_t;
527  if (expected == static_cast<value_type>(int_opr_t::cas(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired),
528  static_cast<opr_t>(expected), success))) {
529  return true;
530  } else {
531  expected = data_;
532  return false;
533  }
534 
535 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
536  return __atomic_compare_exchange_n(&data_, &expected, desired, true, success, failure);
537 #else
538  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
539  return true;
540  } else {
541  expected = data_;
542  return false;
543  }
544 #endif
545  }
546 
547  inline bool compare_exchange_weak(value_type &expected, value_type desired,
548  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
549 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
551  typedef typename int_opr_t::opr_t opr_t;
552  if (expected == static_cast<value_type>(int_opr_t::cas(reinterpret_cast<volatile opr_t *>(&data_),
553  static_cast<opr_t>(desired), static_cast<opr_t>(expected), order))) {
554  return true;
555  } else {
556  expected = data_;
557  return false;
558  }
559 
560 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
561  return __atomic_compare_exchange_n(&data_, &expected, desired, true, order, order);
562 #else
563  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
564  return true;
565  } else {
566  expected = data_;
567  return false;
568  }
569 #endif
570  }
571 
572  inline bool
573  compare_exchange_weak(value_type &expected, value_type desired,
574  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
575 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
577  typedef typename int_opr_t::opr_t opr_t;
578  if (expected == static_cast<value_type>(int_opr_t::cas(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired),
579  static_cast<opr_t>(expected), order))) {
580  return true;
581  } else {
582  expected = data_;
583  return false;
584  }
585 
586 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
587  return __atomic_compare_exchange_n(&data_, &expected, desired, true, order, order);
588 #else
589  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
590  return true;
591  } else {
592  expected = data_;
593  return false;
594  }
595 #endif
596  }
597 
598  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
599  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
600 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
602  typedef typename int_opr_t::opr_t opr_t;
603  if (expected == static_cast<value_type>(int_opr_t::cas(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired),
604  static_cast<opr_t>(expected), success))) {
605  return true;
606  } else {
607  expected = data_;
608  return false;
609  }
610 
611 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
612  return __atomic_compare_exchange_n(&data_, &expected, desired, false, success, failure);
613 #else
614  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
615  return true;
616  } else {
617  expected = data_;
618  return false;
619  }
620 #endif
621  }
622 
623  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
624  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
625 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
627  typedef typename int_opr_t::opr_t opr_t;
628  if (expected == static_cast<value_type>(int_opr_t::cas(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired),
629  static_cast<opr_t>(expected), success))) {
630  return true;
631  } else {
632  expected = data_;
633  return false;
634  }
635 
636 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
637  return __atomic_compare_exchange_n(&data_, &expected, desired, false, success, failure);
638 #else
639  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
640  return true;
641  } else {
642  expected = data_;
643  return false;
644  }
645 #endif
646  }
647 
648  inline bool
649  compare_exchange_strong(value_type &expected, value_type desired,
650  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
651 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
653  typedef typename int_opr_t::opr_t opr_t;
654  if (expected == static_cast<value_type>(int_opr_t::cas(reinterpret_cast<volatile opr_t *>(&data_),
655  static_cast<opr_t>(desired), static_cast<opr_t>(expected), order))) {
656  return true;
657  } else {
658  expected = data_;
659  return false;
660  }
661 
662 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
663  return __atomic_compare_exchange_n(&data_, &expected, desired, false, order, order);
664 #else
665  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
666  return true;
667  } else {
668  expected = data_;
669  return false;
670  }
671 #endif
672  }
673 
674  inline bool
675  compare_exchange_strong(value_type &expected, value_type desired,
676  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
677 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
679  typedef typename int_opr_t::opr_t opr_t;
680  if (expected == static_cast<value_type>(int_opr_t::cas(static_cast<opr_t *>(&data_), static_cast<opr_t>(desired),
681  static_cast<opr_t>(expected), order))) {
682  return true;
683  } else {
684  expected = data_;
685  return false;
686  }
687 
688 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
689  return __atomic_compare_exchange_n(&data_, &expected, desired, false, order, order);
690 #else
691  if (__sync_bool_compare_and_swap(&data_, expected, desired)) {
692  return true;
693  } else {
694  expected = data_;
695  return false;
696  }
697 #endif
698  }
699 
700  inline value_type fetch_add(value_type arg,
701  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
702 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
704  typedef typename int_opr_t::opr_t opr_t;
705  return static_cast<value_type>(int_opr_t::add(reinterpret_cast<volatile opr_t *>(&data_), static_cast<opr_t>(arg), order)) -
706  arg;
707 
708 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
709  return __atomic_fetch_add(&data_, arg, order);
710 #else
711  return __sync_fetch_and_add(&data_, arg);
712 #endif
713  }
714  inline value_type
715  fetch_add(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
716 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
718  typedef typename int_opr_t::opr_t opr_t;
719  return static_cast<value_type>(int_opr_t::add(static_cast<opr_t *>(&data_), static_cast<opr_t>(arg), order)) - arg;
720 
721 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
722  return __atomic_fetch_add(&data_, arg, order);
723 #else
724  return __sync_fetch_and_add(&data_, arg);
725 #endif
726  }
727 
728  inline value_type fetch_sub(value_type arg,
729  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
730 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
732  typedef typename int_opr_t::opr_t opr_t;
733  return static_cast<value_type>(int_opr_t::sub(reinterpret_cast<volatile opr_t *>(&data_), static_cast<opr_t>(arg), order)) +
734  arg;
735 
736 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
737  return __atomic_fetch_sub(&data_, arg, order);
738 #else
739  return __sync_fetch_and_sub(&data_, arg);
740 #endif
741  }
742  inline value_type
743  fetch_sub(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
744 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
746  typedef typename int_opr_t::opr_t opr_t;
747  return static_cast<value_type>(int_opr_t::sub(static_cast<opr_t *>(&data_), static_cast<opr_t>(arg), order)) + arg;
748 
749 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
750  return __atomic_fetch_sub(&data_, arg, order);
751 #else
752  return __sync_fetch_and_sub(&data_, arg);
753 #endif
754  }
755 
756  inline value_type fetch_and(value_type arg,
757  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
758 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
760  typedef typename int_opr_t::opr_t opr_t;
761  return static_cast<value_type>(int_opr_t::and(reinterpret_cast<volatile opr_t *>(&data_), static_cast<opr_t>(arg), order));
762 
763 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
764  return __atomic_fetch_and(&data_, arg, order);
765 #else
766  return __sync_fetch_and_and(&data_, arg);
767 #endif
768  }
769  inline value_type
770  fetch_and(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
771 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
773  typedef typename int_opr_t::opr_t opr_t;
774  return static_cast<value_type>(int_opr_t::and(static_cast<opr_t *>(&data_), static_cast<opr_t>(arg), order));
775 
776 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
777  return __atomic_fetch_and(&data_, arg, order);
778 #else
779  return __sync_fetch_and_and(&data_, arg);
780 #endif
781  }
782 
783  inline value_type fetch_or(value_type arg,
784  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
785 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
787  typedef typename int_opr_t::opr_t opr_t;
788  return static_cast<value_type>(int_opr_t:: or (reinterpret_cast<volatile opr_t *>(&data_), static_cast<opr_t>(arg), order));
789 
790 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
791  return __atomic_fetch_or(&data_, arg, order);
792 #else
793  return __sync_fetch_and_or(&data_, arg);
794 #endif
795  }
796  inline value_type
797  fetch_or(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
798 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
800  typedef typename int_opr_t::opr_t opr_t;
801  return static_cast<value_type>(int_opr_t:: or (static_cast<opr_t *>(&data_), static_cast<opr_t>(arg), order));
802 
803 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
804  return __atomic_fetch_or(&data_, arg, order);
805 #else
806  return __sync_fetch_and_or(&data_, arg);
807 #endif
808  }
809 
810  inline value_type fetch_xor(value_type arg,
811  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
812 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
814  typedef typename int_opr_t::opr_t opr_t;
815  return static_cast<value_type>(int_opr_t:: xor
816  (reinterpret_cast<volatile opr_t *>(&data_), static_cast<opr_t>(arg), order));
817 
818 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
819  return __atomic_fetch_xor(&data_, arg, order);
820 #else
821  return __sync_fetch_and_xor(&data_, arg);
822 #endif
823  }
824  inline value_type
825  fetch_xor(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
826 #ifdef __UTIL_LOCK_ATOMIC_INT_ATOMIC_MSVC
828  typedef typename int_opr_t::opr_t opr_t;
829  return static_cast<value_type>(int_opr_t:: xor (static_cast<opr_t *>(&data_), static_cast<opr_t>(arg), order));
830 
831 #elif defined(__UTIL_LOCK_ATOMIC_INT_ATOMIC_GCC_ATOMIC)
832  return __atomic_fetch_xor(&data_, arg, order);
833 #else
834  return __sync_fetch_and_xor(&data_, arg);
835 #endif
836  }
837  };
838 
839 #endif
840 
841  // used for unsafe (not multi-thread safe)
842  template <typename Ty = int>
844  typedef Ty value_type;
845  };
846 
847  template <typename Ty>
849  public:
851 
852  private:
853  value_type data_;
854  atomic_int_type(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
855 #ifndef _MSC_VER
856  atomic_int_type &operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION;
857  atomic_int_type &operator=(const atomic_int_type &) volatile UTIL_CONFIG_DELETED_FUNCTION;
858 #endif
859 
860  public:
861  atomic_int_type() UTIL_CONFIG_NOEXCEPT : data_() {}
862  atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT : data_(desired) {}
863 
864  inline void store(value_type desired,
865  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
866  data_ = desired;
867  }
868  inline void store(value_type desired,
869  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
870  data_ = desired;
871  }
872 
873  inline value_type load(::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT {
874  return data_;
875  }
877  volatile UTIL_CONFIG_NOEXCEPT {
878  return data_;
879  }
880 
881  inline operator value_type() const UTIL_CONFIG_NOEXCEPT { return load(); }
882  inline operator value_type() const volatile UTIL_CONFIG_NOEXCEPT { return load(); }
883 
884  inline value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT { store(desired); }
885  inline value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT { store(desired); }
886 
887  inline value_type operator++() UTIL_CONFIG_NOEXCEPT { return ++data_; }
888  inline value_type operator++() volatile UTIL_CONFIG_NOEXCEPT { return ++data_; }
889  inline value_type operator++(int)UTIL_CONFIG_NOEXCEPT { return data_++; }
890  inline value_type operator++(int)volatile UTIL_CONFIG_NOEXCEPT { return data_++; }
891  inline value_type operator--() UTIL_CONFIG_NOEXCEPT { return --data_; }
892  inline value_type operator--() volatile UTIL_CONFIG_NOEXCEPT { return --data_; }
893  inline value_type operator--(int)UTIL_CONFIG_NOEXCEPT { return data_--; }
894  inline value_type operator--(int)volatile UTIL_CONFIG_NOEXCEPT { return data_--; }
895 
896  inline value_type exchange(value_type desired,
897  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
898  value_type ret = data_;
899  data_ = desired;
900  return ret;
901  }
902  inline value_type
903  exchange(value_type desired,
904  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
905  value_type ret = data_;
906  data_ = desired;
907  return ret;
908  }
909 
910  private:
911  inline bool cas(value_type &expected, value_type desired) UTIL_CONFIG_NOEXCEPT {
912  if (likely(data_ == expected)) {
913  data_ = desired;
914  return true;
915  } else {
916  expected = data_;
917  return false;
918  }
919  }
920 
921  public:
922  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
923  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
924  return cas(expected, desired);
925  }
926  inline bool compare_exchange_weak(value_type &expected, value_type desired, ::util::lock::memory_order success,
927  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
928  return cas(expected, desired);
929  }
930 
931  inline bool compare_exchange_weak(value_type &expected, value_type desired,
932  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
933  return cas(expected, desired);
934  }
935  inline bool
936  compare_exchange_weak(value_type &expected, value_type desired,
937  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
938  return cas(expected, desired);
939  }
940 
941  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
942  ::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT {
943  return cas(expected, desired);
944  }
945  inline bool compare_exchange_strong(value_type &expected, value_type desired, ::util::lock::memory_order success,
946  ::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT {
947  return cas(expected, desired);
948  }
949 
950  inline bool
951  compare_exchange_strong(value_type &expected, value_type desired,
952  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
953  return cas(expected, desired);
954  }
955  inline bool
956  compare_exchange_strong(value_type &expected, value_type desired,
957  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
958  return cas(expected, desired);
959  }
960 
961  inline value_type fetch_add(value_type arg,
962  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
963  value_type ret = data_;
964  data_ += arg;
965  return ret;
966  }
967  inline value_type
968  fetch_add(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
969  value_type ret = data_;
970  data_ += arg;
971  return ret;
972  }
973 
974  inline value_type fetch_sub(value_type arg,
975  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
976  value_type ret = data_;
977  data_ -= arg;
978  return ret;
979  }
980  inline value_type
981  fetch_sub(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
982  value_type ret = data_;
983  data_ -= arg;
984  return ret;
985  }
986 
987  inline value_type fetch_and(value_type arg,
988  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
989  value_type ret = data_;
990  data_ &= arg;
991  return ret;
992  }
993  inline value_type
994  fetch_and(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
995  value_type ret = data_;
996  data_ &= arg;
997  return ret;
998  }
999 
1000  inline value_type fetch_or(value_type arg,
1001  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
1002  value_type ret = data_;
1003  data_ |= arg;
1004  return ret;
1005  }
1006  inline value_type
1007  fetch_or(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
1008  value_type ret = data_;
1009  data_ |= arg;
1010  return ret;
1011  }
1012 
1013  inline value_type fetch_xor(value_type arg,
1014  ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT {
1015  value_type ret = data_;
1016  data_ ^= arg;
1017  return ret;
1018  }
1019  inline value_type
1020  fetch_xor(value_type arg, ::util::lock::memory_order order = ::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT {
1021  value_type ret = data_;
1022  data_ ^= arg;
1023  return ret;
1024  }
1025  };
1026  }
1027 } // namespace util
1028 
#endif /* UTIL_LOCK_ATOMIC_INT_TYPE_H */
value_type operator++(int) UTIL_CONFIG_NOEXCEPT
value_type fetch_add(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT
value_type operator--() volatile UTIL_CONFIG_NOEXCEPT
value_type operator++(int) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
value_type operator--() UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
void store(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type operator--(int) UTIL_CONFIG_NOEXCEPT
value_type operator--(int) volatile UTIL_CONFIG_NOEXCEPT
value_type load(::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type operator++(int) volatile UTIL_CONFIG_NOEXCEPT
value_type operator++(int) UTIL_CONFIG_NOEXCEPT
atomic_int_type() UTIL_CONFIG_NOEXCEPT
value_type load(::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) volatile UTIL_CONFIG_NOEXCEPT
bool cas(value_type &expected, value_type desired) UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_add(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type load(::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT
value_type fetch_add(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_add(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type fetch_sub(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
volatile value_type data_
atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT
#define likely(x)
Definition: features.h:128
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
void store(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
atomic_int_type & operator=(const atomic_int_type &) UTIL_CONFIG_DELETED_FUNCTION
value_type operator++() volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
void store(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type operator--(int) volatile UTIL_CONFIG_NOEXCEPT
void store(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type operator--() volatile UTIL_CONFIG_NOEXCEPT
value_type operator++() UTIL_CONFIG_NOEXCEPT
value_type fetch_and(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type operator++() volatile UTIL_CONFIG_NOEXCEPT
value_type fetch_or(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
value_type load(::util::lock::memory_order order=::util::lock::memory_order_seq_cst) const UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
value_type exchange(value_type desired,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) UTIL_CONFIG_NOEXCEPT
Atomic operations for integer types — unified MSVC interface. Licensed under the MIT licenses.
value_type operator--(int) UTIL_CONFIG_NOEXCEPT
atomic_int_type(value_type desired) UTIL_CONFIG_NOEXCEPT
value_type fetch_xor(value_type arg,::util::lock::memory_order order=::util::lock::memory_order_seq_cst) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT
bool compare_exchange_weak(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) volatile UTIL_CONFIG_NOEXCEPT
bool compare_exchange_strong(value_type &expected, value_type desired,::util::lock::memory_order success,::util::lock::memory_order failure) UTIL_CONFIG_NOEXCEPT
value_type operator=(value_type desired) UTIL_CONFIG_NOEXCEPT