3 #include <libcopp/utils/config/libcopp_build_features.h>
11 #if defined(THREAD_TLS_USE_PTHREAD) && THREAD_TLS_USE_PTHREAD
19 LIBCOPP_COPP_NAMESPACE_BEGIN
// NOTE(review): this span is the interior of allocate_id_by_atomic(); the
// function header, the `uint64_t ret = 0; while (0 == ret)` retry loop, and the
// closing CAS-failure branch are outside this visible chunk — confirm against
// the full file before editing.
// Process-wide counter: bits above seq_bits hold a time slice, the low
// seq_bits hold a per-slice sequence number.
static LIBCOPP_COPP_NAMESPACE_ID::util::lock::atomic_int_type<uint64_t> seq_alloc(0);
// Low 24 bits are the in-slice sequence (2^24 ids per time slice).
static constexpr
const size_t seq_bits = 24;
// Mask keeping only the low 32 bits of time(nullptr) before packing.
static constexpr
const uint64_t time_mask = (
static_cast<uint64_t
>(1) << 32) - 1;
uint64_t res = seq_alloc.load();
// Split the packed value: everything above seq_bits is the time part.
uint64_t time_part = res >> seq_bits;
uint64_t next_ret = res + 1;
uint64_t next_time_part = next_ret >> seq_bits;
// Reseed the time part when it is still zero (first use) or when incrementing
// the sequence would carry into the time bits (sequence exhausted).
if (0 == time_part || time_part != next_time_part) {
uint64_t now_time = time_part;
// Spin until the wall clock yields a slice different from the stale one.
while (time_part == now_time) {
// 1577836800 is the unix timestamp of 2020-01-01T00:00:00Z; subtracting it
// keeps the packed time part small. NOTE(review): the offset is applied
// after masking to 32 bits — confirm this matches the intended epoch math.
now_time = (
static_cast<uint64_t
>(time(
nullptr)) & time_mask) - 1577836800;
// Publish the new time slice; if the CAS fails another thread already
// advanced the counter and the (non-visible) outer loop retries.
if (seq_alloc.compare_exchange_strong(res, now_time << seq_bits,
ret = now_time << seq_bits;
#if defined(THREAD_TLS_USE_PTHREAD) && THREAD_TLS_USE_PTHREAD
// One-shot guard and TLS key for the per-thread id-allocator cache.
static pthread_once_t gt_uint64_id_allocator_tls_once = PTHREAD_ONCE_INIT;
static pthread_key_t gt_uint64_id_allocator_tls_key;
// TLS destructor: releases a thread's cache object when the thread exits.
// NOTE(review): the cast of `p` into `cache` and the delete statement are on
// lines not visible in this chunk.
static void dtor_pthread_uint64_id_allocator_tls(
void *p) {
if (
nullptr != cache) {
78 static void init_pthread_uint64_id_allocator_tls() {
79 (void)pthread_key_create(>_uint64_id_allocator_tls_key, dtor_pthread_uint64_id_allocator_tls);
// Static-lifetime helper whose destructor runs at program exit on the main
// thread — presumably because pthread TLS destructors are not invoked for the
// main thread, so its cache must be released manually (confirm against POSIX
// TLS semantics).
struct gt_uint64_id_allocator_tls_cache_main_thread_dtor_t {
gt_uint64_id_allocator_tls_cache_main_thread_dtor_t() {}
~gt_uint64_id_allocator_tls_cache_main_thread_dtor_t() {
// Detach the cache from the TLS slot first so the pthread destructor cannot
// free it a second time, then release it directly.
void *cache_ptr = pthread_getspecific(gt_uint64_id_allocator_tls_key);
pthread_setspecific(gt_uint64_id_allocator_tls_key,
nullptr);
dtor_pthread_uint64_id_allocator_tls(cache_ptr);
// Forces construction of the function-local static below so its destructor is
// registered with the runtime and runs at exit, cleaning up the main thread's
// cache.
static void init_pthread_get_log_tls_main_thread_dtor() {
static gt_uint64_id_allocator_tls_cache_main_thread_dtor_t gt_uint64_id_allocator_tls_cache_main_thread_dtor;
// Silence the unused-variable warning; only the object's lifetime matters.
(void)gt_uint64_id_allocator_tls_cache_main_thread_dtor;
97 init_pthread_get_log_tls_main_thread_dtor();
98 (void)pthread_once(>_uint64_id_allocator_tls_once, init_pthread_uint64_id_allocator_tls);
99 uint64_id_allocator_tls_cache_t *ret =
100 reinterpret_cast<uint64_id_allocator_tls_cache_t *
>(pthread_getspecific(gt_uint64_id_allocator_tls_key));
101 if (
nullptr == ret) {
102 ret =
new uint64_id_allocator_tls_cache_t();
105 pthread_setspecific(gt_uint64_id_allocator_tls_key, ret);
// NOTE(review): fragment of the TLS-cache allocate() path; the enclosing
// function header and the statements between these spans are not visible in
// this chunk.
if (
nullptr == tls_cache) {
// Per-thread budget: a thread may hand out 2^5 = 32 ids from one cached base
// before it must fetch a fresh base from the shared atomic counter.
static constexpr
const uint64_t tls_cache_count = (
static_cast<uint64_t
>(1) << 5);
// Refill when the cache is uninitialized (base == 0) or its sequence range
// is exhausted.
while (0 == tls_cache->
base || tls_cache->
inner_seq >= tls_cache_count) {
137 LIBCOPP_COPP_NAMESPACE_END
atomic wrapper for integers. Licensed under the MIT license.
static void deallocate(value_type) LIBCOPP_MACRO_NOEXCEPT
static value_type allocate() LIBCOPP_MACRO_NOEXCEPT
static uint64_t allocate_id_by_atomic()
uint64_id_allocator_tls_cache_t * get_uint64_id_allocator_tls_cache()