[TD-13762]<fix>: redefine atomic api.
parent a409c076ff
commit a263f72d58
@@ -85,11 +85,7 @@ typedef struct taosField {
  int32_t bytes;
} TAOS_FIELD;

#ifdef _TD_GO_DLL_
  #define DLL_EXPORT __declspec(dllexport)
#else
  #define DLL_EXPORT
#endif

typedef void (*__taos_async_fn_t)(void *param, TAOS_RES *, int code);

@@ -20,337 +20,96 @@
extern "C" {
#endif

#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
#define atomic_load_8(ptr) (*(char volatile*)(ptr))
#define atomic_load_16(ptr) (*(short volatile*)(ptr))
#define atomic_load_32(ptr) (*(long volatile*)(ptr))
#define atomic_load_64(ptr) (*(__int64 volatile*)(ptr))
#define atomic_load_ptr(ptr) (*(void* volatile*)(ptr))

#define atomic_store_8(ptr, val) ((*(char volatile*)(ptr)) = (char)(val))
#define atomic_store_16(ptr, val) ((*(short volatile*)(ptr)) = (short)(val))
#define atomic_store_32(ptr, val) ((*(long volatile*)(ptr)) = (long)(val))
#define atomic_store_64(ptr, val) ((*(__int64 volatile*)(ptr)) = (__int64)(val))
#define atomic_store_ptr(ptr, val) ((*(void* volatile*)(ptr)) = (void*)(val))

#define atomic_exchange_8(ptr, val) _InterlockedExchange8((char volatile*)(ptr), (char)(val))
#define atomic_exchange_16(ptr, val) _InterlockedExchange16((short volatile*)(ptr), (short)(val))
#define atomic_exchange_32(ptr, val) _InterlockedExchange((long volatile*)(ptr), (long)(val))
#define atomic_exchange_64(ptr, val) _InterlockedExchange64((__int64 volatile*)(ptr), (__int64)(val))
#ifdef _WIN64
#define atomic_exchange_ptr(ptr, val) _InterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
#else
#define atomic_exchange_ptr(ptr, val) _InlineInterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
#endif

#ifdef _TD_GO_DLL_
#define atomic_val_compare_exchange_8 __sync_val_compare_and_swap
#else
#define atomic_val_compare_exchange_8(ptr, oldval, newval) _InterlockedCompareExchange8((char volatile*)(ptr), (char)(newval), (char)(oldval))
#endif
#define atomic_val_compare_exchange_16(ptr, oldval, newval) _InterlockedCompareExchange16((short volatile*)(ptr), (short)(newval), (short)(oldval))
#define atomic_val_compare_exchange_32(ptr, oldval, newval) _InterlockedCompareExchange((long volatile*)(ptr), (long)(newval), (long)(oldval))
#define atomic_val_compare_exchange_64(ptr, oldval, newval) _InterlockedCompareExchange64((__int64 volatile*)(ptr), (__int64)(newval), (__int64)(oldval))
#define atomic_val_compare_exchange_ptr(ptr, oldval, newval) _InterlockedCompareExchangePointer((void* volatile*)(ptr), (void*)(newval), (void*)(oldval))
|
||||
|
||||
char interlocked_add_fetch_8(char volatile *ptr, char val);
|
||||
short interlocked_add_fetch_16(short volatile *ptr, short val);
|
||||
long interlocked_add_fetch_32(long volatile *ptr, long val);
|
||||
__int64 interlocked_add_fetch_64(__int64 volatile *ptr, __int64 val);
|
||||
|
||||
char interlocked_and_fetch_8(char volatile* ptr, char val);
|
||||
short interlocked_and_fetch_16(short volatile* ptr, short val);
|
||||
long interlocked_and_fetch_32(long volatile* ptr, long val);
|
||||
__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val);
|
||||
|
||||
__int64 interlocked_fetch_and_64(__int64 volatile* ptr, __int64 val);
|
||||
|
||||
char interlocked_or_fetch_8(char volatile* ptr, char val);
|
||||
short interlocked_or_fetch_16(short volatile* ptr, short val);
|
||||
long interlocked_or_fetch_32(long volatile* ptr, long val);
|
||||
__int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val);
|
||||
|
||||
char interlocked_xor_fetch_8(char volatile* ptr, char val);
|
||||
short interlocked_xor_fetch_16(short volatile* ptr, short val);
|
||||
long interlocked_xor_fetch_32(long volatile* ptr, long val);
|
||||
__int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val);
|
||||
|
||||
__int64 interlocked_fetch_xor_64(__int64 volatile* ptr, __int64 val);
|
||||
|
||||
#define atomic_add_fetch_8(ptr, val) interlocked_add_fetch_8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_add_fetch_16(ptr, val) interlocked_add_fetch_16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_add_fetch_32(ptr, val) interlocked_add_fetch_32((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_add_fetch_64(ptr, val) interlocked_add_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
#ifdef _TD_GO_DLL_
|
||||
#define atomic_fetch_add_8 __sync_fetch_and_ad
|
||||
#define atomic_fetch_add_16 __sync_fetch_and_add
|
||||
#else
|
||||
#define atomic_fetch_add_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_fetch_add_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), (short)(val))
|
||||
#endif
|
||||
#define atomic_fetch_add_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_fetch_add_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_fetch_add_32(ptr, val) _InterlockedExchangeAdd((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_fetch_add_64(ptr, val) _InterlockedExchangeAdd64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
|
||||
#define atomic_sub_fetch_8(ptr, val) interlocked_add_fetch_8((char volatile*)(ptr), -(char)(val))
|
||||
#define atomic_sub_fetch_16(ptr, val) interlocked_add_fetch_16((short volatile*)(ptr), -(short)(val))
|
||||
#define atomic_sub_fetch_32(ptr, val) interlocked_add_fetch_32((long volatile*)(ptr), -(long)(val))
|
||||
#define atomic_sub_fetch_64(ptr, val) interlocked_add_fetch_64((__int64 volatile*)(ptr), -(__int64)(val))
|
||||
|
||||
#define atomic_fetch_sub_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), -(char)(val))
|
||||
#define atomic_fetch_sub_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), -(short)(val))
|
||||
#define atomic_fetch_sub_32(ptr, val) _InterlockedExchangeAdd((long volatile*)(ptr), -(long)(val))
|
||||
#define atomic_fetch_sub_64(ptr, val) _InterlockedExchangeAdd64((__int64 volatile*)(ptr), -(__int64)(val))
|
||||
|
||||
#define atomic_and_fetch_8(ptr, val) interlocked_and_fetch_8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_and_fetch_16(ptr, val) interlocked_and_fetch_16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_and_fetch_32(ptr, val) interlocked_and_fetch_32((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_and_fetch_64(ptr, val) interlocked_and_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
|
||||
#define atomic_fetch_and_8(ptr, val) _InterlockedAnd8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_fetch_and_16(ptr, val) _InterlockedAnd16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_fetch_and_32(ptr, val) _InterlockedAnd((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_fetch_and_64(ptr, val) interlocked_fetch_and_64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
|
||||
#define atomic_or_fetch_8(ptr, val) interlocked_or_fetch_8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_or_fetch_16(ptr, val) interlocked_or_fetch_16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_or_fetch_32(ptr, val) interlocked_or_fetch_32((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_or_fetch_64(ptr, val) interlocked_or_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
|
||||
#define atomic_fetch_or_8(ptr, val) _InterlockedOr8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_fetch_or_16(ptr, val) _InterlockedOr16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_fetch_or_32(ptr, val) _InterlockedOr((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_fetch_or_64(ptr, val) interlocked_fetch_or_64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
|
||||
#define atomic_xor_fetch_8(ptr, val) interlocked_xor_fetch_8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_xor_fetch_16(ptr, val) interlocked_xor_fetch_16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_xor_fetch_32(ptr, val) interlocked_xor_fetch_32((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_xor_fetch_64(ptr, val) interlocked_xor_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
|
||||
#define atomic_fetch_xor_8(ptr, val) _InterlockedXor8((char volatile*)(ptr), (char)(val))
|
||||
#define atomic_fetch_xor_16(ptr, val) _InterlockedXor16((short volatile*)(ptr), (short)(val))
|
||||
#define atomic_fetch_xor_32(ptr, val) _InterlockedXor((long volatile*)(ptr), (long)(val))
|
||||
#define atomic_fetch_xor_64(ptr, val) interlocked_fetch_xor_64((__int64 volatile*)(ptr), (__int64)(val))
|
||||
|
||||
#ifdef _WIN64
|
||||
#define atomic_add_fetch_ptr atomic_add_fetch_64
|
||||
#define atomic_fetch_add_ptr atomic_fetch_add_64
|
||||
#define atomic_sub_fetch_ptr atomic_sub_fetch_64
|
||||
#define atomic_fetch_sub_ptr atomic_fetch_sub_64
|
||||
#define atomic_and_fetch_ptr atomic_and_fetch_64
|
||||
#define atomic_fetch_and_ptr atomic_fetch_and_64
|
||||
#define atomic_or_fetch_ptr atomic_or_fetch_64
|
||||
#define atomic_fetch_or_ptr atomic_fetch_or_64
|
||||
#define atomic_xor_fetch_ptr atomic_xor_fetch_64
|
||||
#define atomic_fetch_xor_ptr atomic_fetch_xor_64
|
||||
#else
|
||||
#define atomic_add_fetch_ptr atomic_add_fetch_32
|
||||
#define atomic_fetch_add_ptr atomic_fetch_add_32
|
||||
#define atomic_sub_fetch_ptr atomic_sub_fetch_32
|
||||
#define atomic_fetch_sub_ptr atomic_fetch_sub_32
|
||||
#define atomic_and_fetch_ptr atomic_and_fetch_32
|
||||
#define atomic_fetch_and_ptr atomic_fetch_and_32
|
||||
#define atomic_or_fetch_ptr atomic_or_fetch_32
|
||||
#define atomic_fetch_or_ptr atomic_fetch_or_32
|
||||
#define atomic_xor_fetch_ptr atomic_xor_fetch_32
|
||||
#define atomic_fetch_xor_ptr atomic_fetch_xor_32
|
||||
#endif
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
/*
|
||||
* type __sync_fetch_and_add (type *ptr, type value);
|
||||
* type __sync_fetch_and_sub (type *ptr, type value);
|
||||
* type __sync_fetch_and_or (type *ptr, type value);
|
||||
* type __sync_fetch_and_and (type *ptr, type value);
|
||||
* type __sync_fetch_and_xor (type *ptr, type value);
|
||||
* type __sync_fetch_and_nand (type *ptr, type value);
|
||||
* type __sync_add_and_fetch (type *ptr, type value);
|
||||
* type __sync_sub_and_fetch (type *ptr, type value);
|
||||
* type __sync_or_and_fetch (type *ptr, type value);
|
||||
* type __sync_and_and_fetch (type *ptr, type value);
|
||||
* type __sync_xor_and_fetch (type *ptr, type value);
|
||||
* type __sync_nand_and_fetch (type *ptr, type value);
|
||||
*
|
||||
* bool __sync_bool_compare_and_swap (type*ptr, type oldval, type newval, ...)
|
||||
* type __sync_val_compare_and_swap (type *ptr, type oldval, ?type newval, ...)
|
||||
* */
|
||||
|
||||
#define atomic_load_8(ptr) __sync_fetch_and_add((ptr), 0)
|
||||
#define atomic_load_16(ptr) __sync_fetch_and_add((ptr), 0)
|
||||
#define atomic_load_32(ptr) __sync_fetch_and_add((ptr), 0)
|
||||
#define atomic_load_64(ptr) __sync_fetch_and_add((ptr), 0)
|
||||
#define atomic_load_ptr(ptr) __sync_fetch_and_add((ptr), 0)
|
||||
|
||||
#define atomic_store_8(ptr, val) (*(ptr)=(val))
|
||||
#define atomic_store_16(ptr, val) (*(ptr)=(val))
|
||||
#define atomic_store_32(ptr, val) (*(ptr)=(val))
|
||||
#define atomic_store_64(ptr, val) (*(ptr)=(val))
|
||||
#define atomic_store_ptr(ptr, val) (*(ptr)=(val))
|
||||
|
||||
int8_t atomic_exchange_8_impl(int8_t* ptr, int8_t val );
|
||||
int16_t atomic_exchange_16_impl(int16_t* ptr, int16_t val );
|
||||
int32_t atomic_exchange_32_impl(int32_t* ptr, int32_t val );
|
||||
int64_t atomic_exchange_64_impl(int64_t* ptr, int64_t val );
|
||||
void* atomic_exchange_ptr_impl( void **ptr, void *val );
|
||||
|
||||
#define atomic_exchange_8(ptr, val) atomic_exchange_8_impl((int8_t*)ptr, (int8_t)val)
|
||||
#define atomic_exchange_16(ptr, val) atomic_exchange_16_impl((int16_t*)ptr, (int16_t)val)
|
||||
#define atomic_exchange_32(ptr, val) atomic_exchange_32_impl((int32_t*)ptr, (int32_t)val)
|
||||
#define atomic_exchange_64(ptr, val) atomic_exchange_64_impl((int64_t*)ptr, (int64_t)val)
|
||||
#define atomic_exchange_ptr(ptr, val) atomic_exchange_ptr_impl((void **)ptr, (void*)val)
|
||||
|
||||
#define atomic_val_compare_exchange_8 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_16 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_32 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_64 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_ptr __sync_val_compare_and_swap
|
||||
|
||||
#define atomic_add_fetch_8(ptr, val) __sync_add_and_fetch((ptr), (val))
|
||||
#define atomic_add_fetch_16(ptr, val) __sync_add_and_fetch((ptr), (val))
|
||||
#define atomic_add_fetch_32(ptr, val) __sync_add_and_fetch((ptr), (val))
|
||||
#define atomic_add_fetch_64(ptr, val) __sync_add_and_fetch((ptr), (val))
|
||||
#define atomic_add_fetch_ptr(ptr, val) __sync_add_and_fetch((ptr), (val))
|
||||
|
||||
#define atomic_fetch_add_8(ptr, val) __sync_fetch_and_add((ptr), (val))
|
||||
#define atomic_fetch_add_16(ptr, val) __sync_fetch_and_add((ptr), (val))
|
||||
#define atomic_fetch_add_32(ptr, val) __sync_fetch_and_add((ptr), (val))
|
||||
#define atomic_fetch_add_64(ptr, val) __sync_fetch_and_add((ptr), (val))
|
||||
#define atomic_fetch_add_ptr(ptr, val) __sync_fetch_and_add((ptr), (val))
|
||||
|
||||
#define atomic_sub_fetch_8(ptr, val) __sync_sub_and_fetch((ptr), (val))
|
||||
#define atomic_sub_fetch_16(ptr, val) __sync_sub_and_fetch((ptr), (val))
|
||||
#define atomic_sub_fetch_32(ptr, val) __sync_sub_and_fetch((ptr), (val))
|
||||
#define atomic_sub_fetch_64(ptr, val) __sync_sub_and_fetch((ptr), (val))
|
||||
#define atomic_sub_fetch_ptr(ptr, val) __sync_sub_and_fetch((ptr), (val))
|
||||
|
||||
#define atomic_fetch_sub_8(ptr, val) __sync_fetch_and_sub((ptr), (val))
|
||||
#define atomic_fetch_sub_16(ptr, val) __sync_fetch_and_sub((ptr), (val))
|
||||
#define atomic_fetch_sub_32(ptr, val) __sync_fetch_and_sub((ptr), (val))
|
||||
#define atomic_fetch_sub_64(ptr, val) __sync_fetch_and_sub((ptr), (val))
|
||||
#define atomic_fetch_sub_ptr(ptr, val) __sync_fetch_and_sub((ptr), (val))
|
||||
|
||||
#define atomic_and_fetch_8(ptr, val) __sync_and_and_fetch((ptr), (val))
|
||||
#define atomic_and_fetch_16(ptr, val) __sync_and_and_fetch((ptr), (val))
|
||||
#define atomic_and_fetch_32(ptr, val) __sync_and_and_fetch((ptr), (val))
|
||||
#define atomic_and_fetch_64(ptr, val) __sync_and_and_fetch((ptr), (val))
|
||||
#define atomic_and_fetch_ptr(ptr, val) __sync_and_and_fetch((ptr), (val))
|
||||
|
||||
#define atomic_fetch_and_8(ptr, val) __sync_fetch_and_and((ptr), (val))
|
||||
#define atomic_fetch_and_16(ptr, val) __sync_fetch_and_and((ptr), (val))
|
||||
#define atomic_fetch_and_32(ptr, val) __sync_fetch_and_and((ptr), (val))
|
||||
#define atomic_fetch_and_64(ptr, val) __sync_fetch_and_and((ptr), (val))
|
||||
#define atomic_fetch_and_ptr(ptr, val) __sync_fetch_and_and((ptr), (val))
|
||||
|
||||
#define atomic_or_fetch_8(ptr, val) __sync_or_and_fetch((ptr), (val))
|
||||
#define atomic_or_fetch_16(ptr, val) __sync_or_and_fetch((ptr), (val))
|
||||
#define atomic_or_fetch_32(ptr, val) __sync_or_and_fetch((ptr), (val))
|
||||
#define atomic_or_fetch_64(ptr, val) __sync_or_and_fetch((ptr), (val))
|
||||
#define atomic_or_fetch_ptr(ptr, val) __sync_or_and_fetch((ptr), (val))
|
||||
|
||||
#define atomic_fetch_or_8(ptr, val) __sync_fetch_and_or((ptr), (val))
|
||||
#define atomic_fetch_or_16(ptr, val) __sync_fetch_and_or((ptr), (val))
|
||||
#define atomic_fetch_or_32(ptr, val) __sync_fetch_and_or((ptr), (val))
|
||||
#define atomic_fetch_or_64(ptr, val) __sync_fetch_and_or((ptr), (val))
|
||||
#define atomic_fetch_or_ptr(ptr, val) __sync_fetch_and_or((ptr), (val))
|
||||
|
||||
#define atomic_xor_fetch_8(ptr, val) __sync_xor_and_fetch((ptr), (val))
|
||||
#define atomic_xor_fetch_16(ptr, val) __sync_xor_and_fetch((ptr), (val))
|
||||
#define atomic_xor_fetch_32(ptr, val) __sync_xor_and_fetch((ptr), (val))
|
||||
#define atomic_xor_fetch_64(ptr, val) __sync_xor_and_fetch((ptr), (val))
|
||||
#define atomic_xor_fetch_ptr(ptr, val) __sync_xor_and_fetch((ptr), (val))
|
||||
|
||||
#define atomic_fetch_xor_8(ptr, val) __sync_fetch_and_xor((ptr), (val))
|
||||
#define atomic_fetch_xor_16(ptr, val) __sync_fetch_and_xor((ptr), (val))
|
||||
#define atomic_fetch_xor_32(ptr, val) __sync_fetch_and_xor((ptr), (val))
|
||||
#define atomic_fetch_xor_64(ptr, val) __sync_fetch_and_xor((ptr), (val))
|
||||
#define atomic_fetch_xor_ptr(ptr, val) __sync_fetch_and_xor((ptr), (val))
|
||||
|
||||
#else
|
||||
#define atomic_load_8(ptr) __atomic_load_n((ptr), __ATOMIC_SEQ_CST)
|
||||
#define atomic_load_16(ptr) __atomic_load_n((ptr), __ATOMIC_SEQ_CST)
|
||||
#define atomic_load_32(ptr) __atomic_load_n((ptr), __ATOMIC_SEQ_CST)
|
||||
#define atomic_load_64(ptr) __atomic_load_n((ptr), __ATOMIC_SEQ_CST)
|
||||
#define atomic_load_ptr(ptr) __atomic_load_n((ptr), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_store_8(ptr, val) __atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_store_16(ptr, val) __atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_store_32(ptr, val) __atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_store_64(ptr, val) __atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_store_ptr(ptr, val) __atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_exchange_8(ptr, val) __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_exchange_16(ptr, val) __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_exchange_32(ptr, val) __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_exchange_64(ptr, val) __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_exchange_ptr(ptr, val) __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_val_compare_exchange_8 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_16 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_32 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_64 __sync_val_compare_and_swap
|
||||
#define atomic_val_compare_exchange_ptr __sync_val_compare_and_swap
|
||||
|
||||
#define atomic_add_fetch_8(ptr, val) __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_add_fetch_16(ptr, val) __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_add_fetch_32(ptr, val) __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_add_fetch_64(ptr, val) __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_add_fetch_ptr(ptr, val) __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_fetch_add_8(ptr, val) __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_add_16(ptr, val) __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_add_32(ptr, val) __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_add_64(ptr, val) __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_add_ptr(ptr, val) __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_sub_fetch_8(ptr, val) __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_sub_fetch_16(ptr, val) __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_sub_fetch_32(ptr, val) __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_sub_fetch_64(ptr, val) __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_sub_fetch_ptr(ptr, val) __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_fetch_sub_8(ptr, val) __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_sub_16(ptr, val) __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_sub_32(ptr, val) __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_sub_64(ptr, val) __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_sub_ptr(ptr, val) __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_and_fetch_8(ptr, val) __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_and_fetch_16(ptr, val) __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_and_fetch_32(ptr, val) __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_and_fetch_64(ptr, val) __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_and_fetch_ptr(ptr, val) __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_fetch_and_8(ptr, val) __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_and_16(ptr, val) __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_and_32(ptr, val) __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_and_64(ptr, val) __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_and_ptr(ptr, val) __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_or_fetch_8(ptr, val) __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_or_fetch_16(ptr, val) __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_or_fetch_32(ptr, val) __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_or_fetch_64(ptr, val) __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_or_fetch_ptr(ptr, val) __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_fetch_or_8(ptr, val) __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_or_16(ptr, val) __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_or_32(ptr, val) __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_or_64(ptr, val) __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_or_ptr(ptr, val) __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_xor_fetch_8(ptr, val) __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_xor_fetch_16(ptr, val) __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_xor_fetch_32(ptr, val) __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_xor_fetch_64(ptr, val) __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_xor_fetch_ptr(ptr, val) __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
|
||||
#define atomic_fetch_xor_8(ptr, val) __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_xor_16(ptr, val) __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_xor_32(ptr, val) __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_xor_64(ptr, val) __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
#define atomic_fetch_xor_ptr(ptr, val) __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST)
|
||||
// If the error is in a third-party library, place this header file under the third-party library header file.
|
||||
// When you want to use this feature, you should find or add the same function in the following section.
|
||||
#ifndef ALLOW_FORBID_FUNC
|
||||
#define __atomic_load_n __ATOMIC_LOAD_N_FUNC_TAOS_FORBID
|
||||
#define __atomic_store_n __ATOMIC_STORE_N_FUNC_TAOS_FORBID
|
||||
#define __atomic_exchange_n __ATOMIC_EXCHANGE_N_FUNC_TAOS_FORBID
|
||||
#define __sync_val_compare_and_swap __SYNC_VAL_COMPARE_AND_SWAP_FUNC_TAOS_FORBID
|
||||
#define __atomic_add_fetch __ATOMIC_ADD_FETCH_FUNC_TAOS_FORBID
|
||||
#define __atomic_fetch_add __ATOMIC_FETCH_ADD_FUNC_TAOS_FORBID
|
||||
#define __atomic_sub_fetch __ATOMIC_SUB_FETCH_FUNC_TAOS_FORBID
|
||||
#define __atomic_fetch_sub __ATOMIC_FETCH_SUB_FUNC_TAOS_FORBID
|
||||
#define __atomic_and_fetch __ATOMIC_AND_FETCH_FUNC_TAOS_FORBID
|
||||
#define __atomic_fetch_and __ATOMIC_FETCH_AND_FUNC_TAOS_FORBID
|
||||
#define __atomic_or_fetch __ATOMIC_OR_FETCH_FUNC_TAOS_FORBID
|
||||
#define __atomic_fetch_or __ATOMIC_FETCH_OR_FUNC_TAOS_FORBID
|
||||
#define __atomic_xor_fetch __ATOMIC_XOR_FETCH_FUNC_TAOS_FORBID
|
||||
#define __atomic_fetch_xor __ATOMIC_FETCH_XOR_FUNC_TAOS_FORBID
|
||||
#endif
|
||||
|
||||
int8_t atomic_load_8(int8_t volatile *ptr);
|
||||
int16_t atomic_load_16(int16_t volatile *ptr);
|
||||
int32_t atomic_load_32(int32_t volatile *ptr);
|
||||
int64_t atomic_load_64(int64_t volatile *ptr);
|
||||
void* atomic_load_ptr(void *ptr);
|
||||
void atomic_store_8(int8_t volatile *ptr, int8_t val);
|
||||
void atomic_store_16(int16_t volatile *ptr, int16_t val);
|
||||
void atomic_store_32(int32_t volatile *ptr, int32_t val);
|
||||
void atomic_store_64(int64_t volatile *ptr, int64_t val);
|
||||
void atomic_store_ptr(void *ptr, void *val);
|
||||
int8_t atomic_exchange_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_exchange_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_exchange_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_exchange_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_exchange_ptr(void *ptr, void *val);
|
||||
int8_t atomic_val_compare_exchange_8(int8_t volatile *ptr, int8_t oldval, int8_t newval);
|
||||
int16_t atomic_val_compare_exchange_16(int16_t volatile *ptr, int16_t oldval, int16_t newval);
|
||||
int32_t atomic_val_compare_exchange_32(int32_t volatile *ptr, int32_t oldval, int32_t newval);
|
||||
int64_t atomic_val_compare_exchange_64(int64_t volatile *ptr, int64_t oldval, int64_t newval);
|
||||
void* atomic_val_compare_exchange_ptr(void *ptr, void *oldval, void *newval);
|
||||
int8_t atomic_add_fetch_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_add_fetch_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_add_fetch_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_add_fetch_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_add_fetch_ptr(void *ptr, int32_t val);
|
||||
int8_t atomic_fetch_add_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_fetch_add_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_fetch_add_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_fetch_add_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_fetch_add_ptr(void *ptr, int32_t val);
|
||||
int8_t atomic_sub_fetch_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_sub_fetch_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_sub_fetch_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_sub_fetch_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_sub_fetch_ptr(void *ptr, int32_t val);
|
||||
int8_t atomic_fetch_sub_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_fetch_sub_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_fetch_sub_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_fetch_sub_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_fetch_sub_ptr(void *ptr, int32_t val);
|
||||
int8_t atomic_and_fetch_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_and_fetch_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_and_fetch_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_and_fetch_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_and_fetch_ptr(void *ptr, void *val);
|
||||
int8_t atomic_fetch_and_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_fetch_and_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_fetch_and_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_fetch_and_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_fetch_and_ptr(void *ptr, void *val);
|
||||
int8_t atomic_or_fetch_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_or_fetch_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_or_fetch_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_or_fetch_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_or_fetch_ptr(void *ptr, void *val);
|
||||
int8_t atomic_fetch_or_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_fetch_or_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_fetch_or_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_fetch_or_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_fetch_or_ptr(void *ptr, void *val);
|
||||
int8_t atomic_xor_fetch_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_xor_fetch_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_xor_fetch_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_xor_fetch_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_xor_fetch_ptr(void *ptr, void *val);
|
||||
int8_t atomic_fetch_xor_8(int8_t volatile *ptr, int8_t val);
|
||||
int16_t atomic_fetch_xor_16(int16_t volatile *ptr, int16_t val);
|
||||
int32_t atomic_fetch_xor_32(int32_t volatile *ptr, int32_t val);
|
||||
int64_t atomic_fetch_xor_64(int64_t volatile *ptr, int64_t val);
|
||||
void* atomic_fetch_xor_ptr(void *ptr, void *val);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
|
|
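The net effect of this hunk (337 lines down to 96): the per-platform atomic macro blocks are removed from the header and callers now go through the fixed-width function declarations listed above, one signature for every platform. Below is a minimal, self-contained sketch of what that means for calling code; the demo_* helpers are illustrative only (not part of the commit) and simply mirror the __atomic-builtin branch shown in this diff.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for the redefined API: the same fixed-width signatures as
 * the declarations added in this header, implemented with the same GCC/Clang
 * builtins the non-Windows branch uses. */
static int64_t demo_atomic_add_fetch_64(int64_t volatile *ptr, int64_t val) {
  return __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
}

static int8_t demo_atomic_load_8(int8_t volatile *ptr) {
  return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
}

int main(void) {
  volatile int64_t totalRequests = 0;  /* e.g. a 64-bit request counter */
  volatile int8_t  exitFlag = 0;

  int64_t total = demo_atomic_add_fetch_64(&totalRequests, 1);
  printf("total=%lld exit=%d\n", (long long)total, (int)demo_atomic_load_8(&exitFlag));
  return 0;
}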
@@ -17,6 +17,7 @@
#define _TD_OS_DIR_H_

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define opendir OPENDIR_FUNC_TAOS_FORBID
#define readdir READDIR_FUNC_TAOS_FORBID

@@ -23,6 +23,7 @@ extern "C" {
#include "osSocket.h"

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define open OPEN_FUNC_TAOS_FORBID
#define fopen FOPEN_FUNC_TAOS_FORBID

@@ -23,6 +23,7 @@ extern "C" {
#endif

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define setlocale SETLOCALE_FUNC_TAOS_FORBID
#endif

@@ -21,6 +21,7 @@ extern "C" {
#endif

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define rand RAND_FUNC_TAOS_FORBID
#define srand SRAND_FUNC_TAOS_FORBID

@@ -21,6 +21,7 @@ extern "C" {
#endif

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define Sleep SLEEP_FUNC_TAOS_FORBID
#define sleep SLEEP_FUNC_TAOS_FORBID

@@ -17,6 +17,7 @@
#define _TD_OS_SOCKET_H_

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define socket SOCKET_FUNC_TAOS_FORBID
#define bind BIND_FUNC_TAOS_FORBID

@@ -52,9 +53,6 @@ extern "C" {

#if (defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32))
#define htobe64 htonll
#if defined(_TD_GO_DLL_)
uint64_t htonll(uint64_t val);
#endif
#endif

#if defined(_TD_DARWIN_64)

@@ -24,6 +24,7 @@ typedef wchar_t TdWchar;
typedef int32_t TdUcs4;

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define iconv_open ICONV_OPEN_FUNC_TAOS_FORBID
#define iconv_close ICONV_CLOSE_FUNC_TAOS_FORBID

@@ -21,6 +21,7 @@ extern "C" {
#endif

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define popen POPEN_FUNC_TAOS_FORBID
#define pclose PCLOSE_FUNC_TAOS_FORBID

@@ -37,6 +37,7 @@ typedef pthread_condattr_t TdThreadCondAttr;
#define taosThreadCleanupPop pthread_cleanup_pop

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define pthread_t PTHREAD_T_TYPE_TAOS_FORBID
#define pthread_spinlock_t PTHREAD_SPINLOCK_T_TYPE_TAOS_FORBID

@@ -21,6 +21,7 @@ extern "C" {
#endif

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define strptime STRPTIME_FUNC_TAOS_FORBID
#define gettimeofday GETTIMEOFDAY_FUNC_TAOS_FORBID

@@ -33,11 +34,7 @@ extern "C" {

#define CLOCK_REALTIME 0

#ifdef _TD_GO_DLL_
#define MILLISECOND_PER_SECOND (1000LL)
#else
#define MILLISECOND_PER_SECOND (1000i64)
#endif
#define MILLISECOND_PER_SECOND (1000i64)
#else
#define MILLISECOND_PER_SECOND ((int64_t)1000L)
#endif

@@ -21,6 +21,7 @@ extern "C" {
#endif

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define timer_create TIMER_CREATE_FUNC_TAOS_FORBID
#define timer_settime TIMER_SETTIME_FUNC_TAOS_FORBID

@@ -21,6 +21,7 @@ extern "C" {
#endif

// If the error is in a third-party library, place this header file under the third-party library header file.
// When you want to use this feature, you should find or add the same function in the following section.
#ifndef ALLOW_FORBID_FUNC
#define tzset TZSET_FUNC_TAOS_FORBID
#endif

@@ -48,8 +48,8 @@ static void registerRequest(SRequestObj *pRequest) {
  if (pTscObj->pAppInfo) {
    SInstanceSummary *pSummary = &pTscObj->pAppInfo->summary;

    int32_t total = atomic_add_fetch_32(&pSummary->totalRequests, 1);
    int32_t currentInst = atomic_add_fetch_32(&pSummary->currentRequests, 1);
    int32_t total = atomic_add_fetch_64(&pSummary->totalRequests, 1);
    int32_t currentInst = atomic_add_fetch_64(&pSummary->currentRequests, 1);
    tscDebug("0x%" PRIx64 " new Request from connObj:0x%" PRIx64
             ", current:%d, app current:%d, total:%d, reqId:0x%" PRIx64,
             pRequest->self, pRequest->pTscObj->id, num, currentInst, total, pRequest->requestId);

@@ -62,7 +62,7 @@ static void deregisterRequest(SRequestObj *pRequest) {
  STscObj * pTscObj = pRequest->pTscObj;
  SInstanceSummary *pActivity = &pTscObj->pAppInfo->summary;

  int32_t currentInst = atomic_sub_fetch_32(&pActivity->currentRequests, 1);
  int32_t currentInst = atomic_sub_fetch_64(&pActivity->currentRequests, 1);
  int32_t num = atomic_sub_fetch_32(&pTscObj->numOfReqs, 1);

  int64_t duration = taosGetTimestampMs() - pRequest->metric.start;

@@ -166,7 +166,7 @@ static int32_t mndStreamActionDelete(SSdb *pSdb, SStreamObj *pStream) {

static int32_t mndStreamActionUpdate(SSdb *pSdb, SStreamObj *pOldStream, SStreamObj *pNewStream) {
  mTrace("stream:%s, perform update action", pOldStream->name);
  atomic_exchange_32(&pOldStream->updateTime, pNewStream->updateTime);
  atomic_exchange_64(&pOldStream->updateTime, pNewStream->updateTime);
  atomic_exchange_32(&pOldStream->version, pNewStream->version);

  taosWLockLatch(&pOldStream->lock);

@@ -176,7 +176,7 @@ static int32_t mndTopicActionDelete(SSdb *pSdb, SMqTopicObj *pTopic) {

static int32_t mndTopicActionUpdate(SSdb *pSdb, SMqTopicObj *pOldTopic, SMqTopicObj *pNewTopic) {
  mTrace("topic:%s, perform update action", pOldTopic->name);
  atomic_exchange_32(&pOldTopic->updateTime, pNewTopic->updateTime);
  atomic_exchange_64(&pOldTopic->updateTime, pNewTopic->updateTime);
  atomic_exchange_32(&pOldTopic->version, pNewTopic->version);

  taosWLockLatch(&pOldTopic->lock);

@@ -278,7 +278,7 @@ typedef struct SCtgAction {
#define CTG_ERR_JRET(c) do { code = c; if (code != TSDB_CODE_SUCCESS) { terrno = code; goto _return; } } while (0)

#define CTG_API_LEAVE(c) do { int32_t __code = c; CTG_UNLOCK(CTG_READ, &gCtgMgmt.lock); CTG_API_DEBUG("CTG API leave %s", __FUNCTION__); CTG_RET(__code); } while (0)
#define CTG_API_ENTER() do { CTG_API_DEBUG("CTG API enter %s", __FUNCTION__); CTG_LOCK(CTG_READ, &gCtgMgmt.lock); if (atomic_load_8(&gCtgMgmt.exit)) { CTG_API_LEAVE(TSDB_CODE_CTG_OUT_OF_SERVICE); } } while (0)
#define CTG_API_ENTER() do { CTG_API_DEBUG("CTG API enter %s", __FUNCTION__); CTG_LOCK(CTG_READ, &gCtgMgmt.lock); if (atomic_load_8((int8_t*)&gCtgMgmt.exit)) { CTG_API_LEAVE(TSDB_CODE_CTG_OUT_OF_SERVICE); } } while (0)

@@ -2044,7 +2044,7 @@ void* ctgUpdateThreadFunc(void* param) {
  while (true) {
    tsem_wait(&gCtgMgmt.sem);

    if (atomic_load_8(&gCtgMgmt.exit)) {
    if (atomic_load_8((int8_t*)&gCtgMgmt.exit)) {
      break;
    }

@@ -2090,7 +2090,7 @@ int32_t catalogInit(SCatalogCfg *cfg) {
    CTG_ERR_RET(TSDB_CODE_CTG_INVALID_INPUT);
  }

  atomic_store_8(&gCtgMgmt.exit, false);
  atomic_store_8((int8_t*)&gCtgMgmt.exit, false);

  if (cfg) {
    memcpy(&gCtgMgmt.cfg, cfg, sizeof(*cfg));

@@ -2705,11 +2705,11 @@ int32_t catalogGetExpiredDBs(SCatalog* pCtg, SDbVgVersion **dbs, uint32_t *num)
void catalogDestroy(void) {
  qInfo("start to destroy catalog");

  if (NULL == gCtgMgmt.pCluster || atomic_load_8(&gCtgMgmt.exit)) {
  if (NULL == gCtgMgmt.pCluster || atomic_load_8((int8_t*)&gCtgMgmt.exit)) {
    return;
  }

  atomic_store_8(&gCtgMgmt.exit, true);
  atomic_store_8((int8_t*)&gCtgMgmt.exit, true);

  tsem_post(&gCtgMgmt.sem);

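A recurring pattern in these catalog hunks: gCtgMgmt.exit is a bool, and the redefined atomic_load_8/atomic_store_8 now take int8_t volatile *, so each call site gains an (int8_t*) cast. A minimal self-contained sketch of that pattern follows; the demo_* names are illustrative only, not code from the commit.

#include <stdbool.h>
#include <stdint.h>

_Static_assert(sizeof(bool) == sizeof(int8_t), "the cast below relies on matching size");

/* Same builtin the non-Windows branch of atomic_store_8 uses in this commit. */
static void demo_atomic_store_8(int8_t volatile *ptr, int8_t val) {
  __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
}

static bool demoExit = false;

void demoRequestStop(void) {
  /* Mirrors atomic_store_8((int8_t*)&gCtgMgmt.exit, true) above: the flag is a bool,
   * the function wants int8_t volatile*, so the caller supplies the cast. */
  demo_atomic_store_8((int8_t *)&demoExit, true);
}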
@@ -40,7 +40,7 @@ typedef struct SHNode {

typedef struct SSHashObj {
  SHNode **hashList;
  size_t capacity;      // number of slots
  size_t size;          // number of elements in hash table
  int64_t size;         // number of elements in hash table
  _hash_fn_t hashFp;    // hash function
  _equal_fn_t equalFp;  // equal function
  int32_t keyLen;

@@ -91,7 +91,7 @@ int32_t tSimpleHashGetSize(const SSHashObj *pHashObj) {
  if (pHashObj == NULL) {
    return 0;
  }
  return (int32_t)atomic_load_64(&pHashObj->size);
  return (int32_t)atomic_load_64((int64_t*)&pHashObj->size);
}

static SHNode *doCreateHashNode(const void *key, size_t keyLen, const void *pData, size_t dsize, uint32_t hashVal) {

@@ -97,7 +97,7 @@ bool fmIsAggFunc(int32_t funcId) {

void fmFuncMgtDestroy() {
  void* m = gFunMgtService.pFuncNameHashTable;
  if (m != NULL && atomic_val_compare_exchange_ptr(&gFunMgtService.pFuncNameHashTable, m, 0) == m) {
  if (m != NULL && atomic_val_compare_exchange_ptr((void**)&gFunMgtService.pFuncNameHashTable, m, 0) == m) {
    taosHashCleanup(m);
  }
}

@@ -16,8 +16,8 @@
#include <iomanip>
#include <iostream>
#include <map>
#include "tdatablock.h"
#include "mockCatalogService.h"
#include "tdatablock.h"

#include "tname.h"
#include "ttypes.h"

@@ -50,7 +50,7 @@ namespace {
int32_t qwtTestMaxExecTaskUsec = 2;
int32_t qwtTestReqMaxDelayUsec = 2;

uint64_t qwtTestQueryId = 0;
int64_t qwtTestQueryId = 0;
bool qwtTestEnableSleep = true;
bool qwtTestStop = false;
bool qwtTestDeadLoop = false;

@@ -13,25 +13,18 @@
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#define ALLOW_FORBID_FUNC
#include "os.h"

#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)

// add
char interlocked_add_fetch_8(char volatile* ptr, char val) {
#ifdef _TD_GO_DLL_
  return __sync_fetch_and_add(ptr, val) + val;
#else
  return _InterlockedExchangeAdd8(ptr, val) + val;
#endif
}

short interlocked_add_fetch_16(short volatile* ptr, short val) {
#ifdef _TD_GO_DLL_
  return __sync_fetch_and_add(ptr, val) + val;
#else
  return _InterlockedExchangeAdd16(ptr, val) + val;
#endif
}

long interlocked_add_fetch_32(long volatile* ptr, long val) {

@@ -39,11 +32,7 @@ long interlocked_add_fetch_32(long volatile* ptr, long val) {
}

__int64 interlocked_add_fetch_64(__int64 volatile* ptr, __int64 val) {
//#ifdef _WIN64
  return InterlockedExchangeAdd64(ptr, val) + val;
//#else
//  return _InterlockedExchangeAdd(ptr, val) + val;
//#endif
}

char interlocked_and_fetch_8(char volatile* ptr, char val) {
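The `#define ALLOW_FORBID_FUNC` placed before `#include "os.h"` at the top of this file is the opt-out for the forbid macros that the os headers above now define. A small, self-contained sketch of the mechanism (an illustration under stated assumptions, not code from the commit): after the system headers are in, a libc symbol is remapped to a nonexistent identifier, so a direct call no longer compiles, while translation units that must reach the real functions (like osAtomic.c here) define ALLOW_FORBID_FUNC first.

#include <stdint.h>
#include <stdlib.h>                 /* system header first, as the comment in the os headers instructs */

#ifndef ALLOW_FORBID_FUNC
#define rand RAND_FUNC_TAOS_FORBID  /* same trick as osRand.h in this commit */
#endif

int32_t pickShard(int32_t nShards) {
  /* return rand() % nShards;          would now expand to RAND_FUNC_TAOS_FORBID() and fail to build */
  (void)nShards;
  return 0;                         /* real callers would go through the os-layer wrapper instead */
}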
@ -197,3 +186,709 @@ int64_t atomic_exchange_64_impl(int64_t* ptr, int64_t val ) {
|
|||
return old;
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
int8_t atomic_load_8(int8_t volatile *ptr) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return (*(int8_t volatile*)(ptr));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_fetch_and_add((ptr), 0);
|
||||
#else
|
||||
return __atomic_load_n((ptr), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int16_t atomic_load_16(int16_t volatile *ptr) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return (*(int16_t volatile*)(ptr));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_fetch_and_add((ptr), 0);
|
||||
#else
|
||||
return __atomic_load_n((ptr), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int32_t atomic_load_32(int32_t volatile *ptr) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return (*(int32_t volatile*)(ptr));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_fetch_and_add((ptr), 0);
|
||||
#else
|
||||
return __atomic_load_n((ptr), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int64_t atomic_load_64(int64_t volatile *ptr) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return (*(int64_t volatile*)(ptr));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_fetch_and_add((ptr), 0);
|
||||
#else
|
||||
return __atomic_load_n((ptr), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void* atomic_load_ptr(void *ptr) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return (*(void* volatile*)(ptr));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_fetch_and_add((ptr), 0);
|
||||
#else
|
||||
return __atomic_load_n((void**)(ptr), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void atomic_store_8(int8_t volatile *ptr, int8_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
((*(int8_t volatile*)(ptr)) = (int8_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
(*(ptr)=(val));
|
||||
#else
|
||||
__atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void atomic_store_16(int16_t volatile *ptr, int16_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
((*(int16_t volatile*)(ptr)) = (int16_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
(*(ptr)=(val));
|
||||
#else
|
||||
__atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void atomic_store_32(int32_t volatile *ptr, int32_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
((*(int32_t volatile*)(ptr)) = (int32_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
(*(ptr)=(val));
|
||||
#else
|
||||
__atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void atomic_store_64(int64_t volatile *ptr, int64_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
((*(int64_t volatile*)(ptr)) = (int64_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
(*(ptr)=(val));
|
||||
#else
|
||||
__atomic_store_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void atomic_store_ptr(void *ptr, void *val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
((*(void* volatile*)(ptr)) = (void*)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
(*(ptr)=(val));
|
||||
#else
|
||||
__atomic_store_n((void **)(ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int8_t atomic_exchange_8(int8_t volatile *ptr, int8_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedExchange8((int8_t volatile*)(ptr), (int8_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return atomic_exchange_8_impl((int8_t*)ptr, (int8_t)val);
|
||||
#else
|
||||
return __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int16_t atomic_exchange_16(int16_t volatile *ptr, int16_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedExchange16((int16_t volatile*)(ptr), (int16_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return atomic_exchange_16_impl((int16_t*)ptr, (int16_t)val);
|
||||
#else
|
||||
return __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int32_t atomic_exchange_32(int32_t volatile *ptr, int32_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedExchange((int32_t volatile*)(ptr), (int32_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return atomic_exchange_32_impl((int32_t*)ptr, (int32_t)val);
|
||||
#else
|
||||
return __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int64_t atomic_exchange_64(int64_t volatile *ptr, int64_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedExchange64((int64_t volatile*)(ptr), (int64_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return atomic_exchange_64_impl((int64_t*)ptr, (int64_t)val);
|
||||
#else
|
||||
return __atomic_exchange_n((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void* atomic_exchange_ptr(void *ptr, void *val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
#ifdef _WIN64
|
||||
return _InterlockedExchangePointer((void* volatile*)(ptr), (void*)(val));
|
||||
#else
|
||||
return _InlineInterlockedExchangePointer((void* volatile*)(ptr), (void*)(val));
|
||||
#endif
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return atomic_exchange_ptr_impl((void *)ptr, (void*)val);
|
||||
#else
|
||||
return __atomic_exchange_n((void **)(ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int8_t atomic_val_compare_exchange_8(int8_t volatile *ptr, int8_t oldval, int8_t newval) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedCompareExchange8((int8_t volatile*)(ptr), (int8_t)(newval), (int8_t)(oldval));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#else
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#endif
|
||||
}
|
||||
|
||||
int16_t atomic_val_compare_exchange_16(int16_t volatile *ptr, int16_t oldval, int16_t newval) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedCompareExchange16((int16_t volatile*)(ptr), (int16_t)(newval), (int16_t)(oldval));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#else
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#endif
|
||||
}
|
||||
|
||||
int32_t atomic_val_compare_exchange_32(int32_t volatile *ptr, int32_t oldval, int32_t newval) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedCompareExchange((int32_t volatile*)(ptr), (int32_t)(newval), (int32_t)(oldval));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#else
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#endif
|
||||
}
|
||||
|
||||
int64_t atomic_val_compare_exchange_64(int64_t volatile *ptr, int64_t oldval, int64_t newval) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedCompareExchange64((int64_t volatile*)(ptr), (int64_t)(newval), (int64_t)(oldval));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#else
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#endif
|
||||
}
|
||||
|
||||
void* atomic_val_compare_exchange_ptr(void *ptr, void *oldval, void *newval) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedCompareExchangePointer((void* volatile*)(ptr), (void*)(newval), (void*)(oldval));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_val_compare_and_swap(ptr, oldval, newval);
|
||||
#else
|
||||
return __sync_val_compare_and_swap((void **)ptr, oldval, newval);
|
||||
#endif
|
||||
}
|
||||
|
||||
int8_t atomic_add_fetch_8(int8_t volatile *ptr, int8_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return interlocked_add_fetch_8((int8_t volatile*)(ptr), (int8_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_add_and_fetch((ptr), (val));
|
||||
#else
|
||||
return __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int16_t atomic_add_fetch_16(int16_t volatile *ptr, int16_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return interlocked_add_fetch_16((int16_t volatile*)(ptr), (int16_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_add_and_fetch((ptr), (val));
|
||||
#else
|
||||
return __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int32_t atomic_add_fetch_32(int32_t volatile *ptr, int32_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return interlocked_add_fetch_32((int32_t volatile*)(ptr), (int32_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_add_and_fetch((ptr), (val));
|
||||
#else
|
||||
return __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int64_t atomic_add_fetch_64(int64_t volatile *ptr, int64_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return interlocked_add_fetch_64((int64_t volatile*)(ptr), (int64_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_add_and_fetch((ptr), (val));
|
||||
#else
|
||||
return __atomic_add_fetch((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
void* atomic_add_fetch_ptr(void *ptr, int32_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return interlocked_add_fetch_ptr((void* volatile*)(ptr), (void*)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_add_and_fetch((ptr), (val));
|
||||
#else
|
||||
return __atomic_add_fetch((void **)(ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int8_t atomic_fetch_add_8(int8_t volatile *ptr, int8_t val) {
|
||||
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
|
||||
return _InterlockedExchangeAdd8((int8_t volatile*)(ptr), (int8_t)(val));
|
||||
#elif defined(_TD_NINGSI_60)
|
||||
return __sync_fetch_and_add((ptr), (val));
|
||||
#else
|
||||
return __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST);
|
||||
#endif
|
||||
}
|
||||
|
||||
int16_t atomic_fetch_add_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAdd16((int16_t volatile*)(ptr), (int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_add((ptr), (val));
#else
  return __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_fetch_add_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAdd((int32_t volatile*)(ptr), (int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_add((ptr), (val));
#else
  return __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_fetch_add_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAdd64((int64_t volatile*)(ptr), (int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_add((ptr), (val));
#else
  return __atomic_fetch_add((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_fetch_add_ptr(void *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAddptr((void* volatile*)(ptr), (void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_add((ptr), (val));
#else
  return __atomic_fetch_add((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_sub_fetch_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_add_fetch_8((int8_t volatile*)(ptr), -(int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_sub_and_fetch((ptr), (val));
#else
  return __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_sub_fetch_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_add_fetch_16((int16_t volatile*)(ptr), -(int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_sub_and_fetch((ptr), (val));
#else
  return __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_sub_fetch_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_add_fetch_32((int32_t volatile*)(ptr), -(int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_sub_and_fetch((ptr), (val));
#else
  return __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_sub_fetch_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_add_fetch_64((int64_t volatile*)(ptr), -(int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_sub_and_fetch((ptr), (val));
#else
  return __atomic_sub_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_sub_fetch_ptr(void *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_add_fetch_ptr((void* volatile*)(ptr), -(void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_sub_and_fetch((ptr), (val));
#else
  return __atomic_sub_fetch((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_fetch_sub_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAdd8((int8_t volatile*)(ptr), -(int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_sub((ptr), (val));
#else
  return __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_fetch_sub_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAdd16((int16_t volatile*)(ptr), -(int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_sub((ptr), (val));
#else
  return __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_fetch_sub_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAdd((int32_t volatile*)(ptr), -(int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_sub((ptr), (val));
#else
  return __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_fetch_sub_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAdd64((int64_t volatile*)(ptr), -(int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_sub((ptr), (val));
#else
  return __atomic_fetch_sub((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_fetch_sub_ptr(void *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedExchangeAddptr((void* volatile*)(ptr), -(void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_sub((ptr), (val));
#else
  return __atomic_fetch_sub((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_and_fetch_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_and_fetch_8((int8_t volatile*)(ptr), (int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_and_and_fetch((ptr), (val));
#else
  return __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_and_fetch_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_and_fetch_16((int16_t volatile*)(ptr), (int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_and_and_fetch((ptr), (val));
#else
  return __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_and_fetch_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_and_fetch_32((int32_t volatile*)(ptr), (int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_and_and_fetch((ptr), (val));
#else
  return __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_and_fetch_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_and_fetch_64((int64_t volatile*)(ptr), (int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_and_and_fetch((ptr), (val));
#else
  return __atomic_and_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_and_fetch_ptr(void *ptr, void *val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_and_fetch_ptr((void* volatile*)(ptr), (void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_and_and_fetch((ptr), (val));
#else
  return __atomic_and_fetch((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_fetch_and_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedAnd8((int8_t volatile*)(ptr), (int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_and((ptr), (val));
#else
  return __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_fetch_and_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedAnd16((int16_t volatile*)(ptr), (int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_and((ptr), (val));
#else
  return __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_fetch_and_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedAnd((int32_t volatile*)(ptr), (int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_and((ptr), (val));
#else
  return __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_fetch_and_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_fetch_and_64((int64_t volatile*)(ptr), (int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_and((ptr), (val));
#else
  return __atomic_fetch_and((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_fetch_and_ptr(void *ptr, void *val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_fetch_and_ptr((void* volatile*)(ptr), (void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_and((ptr), (val));
#else
  return __atomic_fetch_and((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_or_fetch_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_or_fetch_8((int8_t volatile*)(ptr), (int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_or_and_fetch((ptr), (val));
#else
  return __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_or_fetch_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_or_fetch_16((int16_t volatile*)(ptr), (int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_or_and_fetch((ptr), (val));
#else
  return __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_or_fetch_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_or_fetch_32((int32_t volatile*)(ptr), (int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_or_and_fetch((ptr), (val));
#else
  return __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_or_fetch_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_or_fetch_64((int64_t volatile*)(ptr), (int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_or_and_fetch((ptr), (val));
#else
  return __atomic_or_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_or_fetch_ptr(void *ptr, void *val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_or_fetch_ptr((void* volatile*)(ptr), (void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_or_and_fetch((ptr), (val));
#else
  return __atomic_or_fetch((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_fetch_or_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedOr8((int8_t volatile*)(ptr), (int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_or((ptr), (val));
#else
  return __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_fetch_or_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedOr16((int16_t volatile*)(ptr), (int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_or((ptr), (val));
#else
  return __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_fetch_or_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedOr((int32_t volatile*)(ptr), (int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_or((ptr), (val));
#else
  return __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_fetch_or_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_fetch_or_64((int64_t volatile*)(ptr), (int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_or((ptr), (val));
#else
  return __atomic_fetch_or((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_fetch_or_ptr(void *ptr, void *val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_fetch_or_ptr((void* volatile*)(ptr), (void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_or((ptr), (val));
#else
  return __atomic_fetch_or((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_xor_fetch_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_xor_fetch_8((int8_t volatile*)(ptr), (int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_xor_and_fetch((ptr), (val));
#else
  return __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_xor_fetch_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_xor_fetch_16((int16_t volatile*)(ptr), (int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_xor_and_fetch((ptr), (val));
#else
  return __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_xor_fetch_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_xor_fetch_32((int32_t volatile*)(ptr), (int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_xor_and_fetch((ptr), (val));
#else
  return __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_xor_fetch_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_xor_fetch_64((int64_t volatile*)(ptr), (int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_xor_and_fetch((ptr), (val));
#else
  return __atomic_xor_fetch((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_xor_fetch_ptr(void *ptr, void *val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_xor_fetch_ptr((void* volatile*)(ptr), (void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_xor_and_fetch((ptr), (val));
#else
  return __atomic_xor_fetch((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int8_t atomic_fetch_xor_8(int8_t volatile *ptr, int8_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedXor8((int8_t volatile*)(ptr), (int8_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_xor((ptr), (val));
#else
  return __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int16_t atomic_fetch_xor_16(int16_t volatile *ptr, int16_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedXor16((int16_t volatile*)(ptr), (int16_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_xor((ptr), (val));
#else
  return __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int32_t atomic_fetch_xor_32(int32_t volatile *ptr, int32_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return _InterlockedXor((int32_t volatile*)(ptr), (int32_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_xor((ptr), (val));
#else
  return __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

int64_t atomic_fetch_xor_64(int64_t volatile *ptr, int64_t val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_fetch_xor_64((int64_t volatile*)(ptr), (int64_t)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_xor((ptr), (val));
#else
  return __atomic_fetch_xor((ptr), (val), __ATOMIC_SEQ_CST);
#endif
}

void* atomic_fetch_xor_ptr(void *ptr, void *val) {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  return interlocked_fetch_xor_ptr((void* volatile*)(ptr), (void*)(val));
#elif defined(_TD_NINGSI_60)
  return __sync_fetch_and_xor((ptr), (val));
#else
  return __atomic_fetch_xor((void **)(ptr), (val), __ATOMIC_SEQ_CST);
#endif
}
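These wrappers expose the atomic API as ordinary functions, one per operand width, so a caller picks the variant whose suffix matches the field it touches; the implementations above also make the naming convention visible: *_add_fetch_*/ *_sub_fetch_* return the value after the update, while *_fetch_add_*/ *_fetch_sub_* return the value before it. A minimal usage sketch under that assumption (the struct and function names below are illustrative, not part of this commit, and the declarations above are assumed to be in scope):

#include <stdint.h>

typedef struct SDemoObj {
  int32_t refCount;    // touched only with the 32-bit helpers
  int64_t totalBytes;  // touched only with the 64-bit helpers
} SDemoObj;

static void demoAcquire(SDemoObj *pObj, int64_t nBytes) {
  int32_t ref = atomic_add_fetch_32(&pObj->refCount, 1);          // value after the increment
  int64_t old = atomic_fetch_add_64(&pObj->totalBytes, nBytes);   // value before the addition
  (void)ref;
  (void)old;
}

static void demoRelease(SDemoObj *pObj) {
  if (atomic_sub_fetch_32(&pObj->refCount, 1) == 0) {
    // last reference dropped; the object may be reclaimed here
  }
}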
@@ -216,12 +216,6 @@ int32_t taosShutDownSocketServerRDWR(TdSocketServerPtr pSocketServer) {
#endif
}

#if (defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32))
#if defined(_TD_GO_DLL_)
uint64_t htonll(uint64_t val) { return (((uint64_t)htonl(val)) << 32) + htonl(val >> 32); }
#endif
#endif

void taosWinSocketInit1() {
#if defined(_TD_WINDOWS_64) || defined(_TD_WINDOWS_32)
  static char flag = 0;
@@ -456,7 +456,7 @@ void *taosCacheAcquireByKey(SCacheObj *pCacheObj, const void *key, size_t keyLen
  }

  if (pCacheObj->numOfElems == 0) {
    atomic_add_fetch_32(&pCacheObj->statistics.missCount, 1);
    atomic_add_fetch_64(&pCacheObj->statistics.missCount, 1);
    return NULL;
  }
@@ -475,15 +475,15 @@ void *taosCacheAcquireByKey(SCacheObj *pCacheObj, const void *key, size_t keyLen

  void *pData = (pNode != NULL) ? pNode->data : NULL;
  if (pData != NULL) {
    atomic_add_fetch_32(&pCacheObj->statistics.hitCount, 1);
    atomic_add_fetch_64(&pCacheObj->statistics.hitCount, 1);
    uDebug("cache:%s, key:%p, %p is retrieved from cache, refcnt:%d", pCacheObj->name, key, pData,
           T_REF_VAL_GET(pNode));
  } else {
    atomic_add_fetch_32(&pCacheObj->statistics.missCount, 1);
    atomic_add_fetch_64(&pCacheObj->statistics.missCount, 1);
    uDebug("cache:%s, key:%p, not in cache, retrieved failed", pCacheObj->name, key);
  }

  atomic_add_fetch_32(&pCacheObj->statistics.totalAccess, 1);
  atomic_add_fetch_64(&pCacheObj->statistics.totalAccess, 1);
  return pData;
}
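Switching these statistics counters from the 32-bit to the 64-bit add is an overflow fix: for the calls to compile, the hit/miss/access fields themselves have to be 64-bit integers, and a 64-bit counter effectively never wraps on a long-running server. A rough sketch of that assumption (the struct below is illustrative; the real statistics struct is declared elsewhere in the tree):

typedef struct SDemoCacheStatis {
  int64_t missCount;    // wide enough for long-lived, busy caches
  int64_t hitCount;
  int64_t totalAccess;
} SDemoCacheStatis;

// Back-of-the-envelope: at 100,000 lookups per second a signed 32-bit
// counter (max 2^31 - 1) wraps after roughly 2.1e9 / 1e5 ≈ 21,000 seconds,
// i.e. about six hours, whereas an int64_t counter at the same rate would
// take millions of years to overflow.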
@@ -55,7 +55,7 @@ typedef struct SHashEntry {
struct SHashObj {
  SHashEntry ** hashList;
  size_t capacity;        // number of slots
  size_t size;            // number of elements in hash table
  int64_t size;           // number of elements in hash table
  _hash_fn_t hashFp;      // hash function
  _equal_fn_t equalFp;    // equal function
  _hash_free_fn_t freeFp; // hash node free callback function
@@ -186,7 +186,7 @@ static SHashNode *doCreateHashNode(const void *key, size_t keyLen, const void *p
static FORCE_INLINE void doUpdateHashNode(SHashObj *pHashObj, SHashEntry* pe, SHashNode* prev, SHashNode *pNode, SHashNode *pNewNode) {
  assert(pNode->keyLen == pNewNode->keyLen);

  atomic_sub_fetch_32(&pNode->refCount, 1);
  atomic_sub_fetch_16(&pNode->refCount, 1);
  if (prev != NULL) {
    prev->next = pNewNode;
  } else {
@@ -302,7 +302,7 @@ int32_t taosHashGetSize(const SHashObj *pHashObj) {
  if (pHashObj == NULL) {
    return 0;
  }
  return (int32_t)atomic_load_64(&pHashObj->size);
  return (int32_t)atomic_load_64((int64_t*)&pHashObj->size);
}
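The hash-table hunks follow the same rule as the cache hunks: the width suffix of the atomic helper must match the declared type of the field, so the element count (now int64_t) is read through atomic_load_64 and the node reference count (apparently a 16-bit field now) is updated with the *_16 helpers. A small width-matching sketch under that assumption (SDemoNode and SDemoTable are illustrative, not the real hash structures):

#include <stdint.h>

typedef struct SDemoNode {
  int16_t refCount;   // 16-bit field, so use the *_16 helpers
} SDemoNode;

typedef struct SDemoTable {
  int64_t size;       // 64-bit element count, so use the *_64 helpers
} SDemoTable;

static void demoRemoveOne(SDemoNode *pNode, SDemoTable *pTable) {
  atomic_sub_fetch_16(&pNode->refCount, 1);  // drop one reference from the node
  atomic_sub_fetch_64(&pTable->size, 1);     // one fewer element in the table
}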

int32_t taosHashPut(SHashObj *pHashObj, const void *key, size_t keyLen, void *data, size_t size) {
@@ -508,7 +508,7 @@ int32_t taosHashRemove(SHashObj *pHashObj, const void *key, size_t keyLen) {
      pNode->removed == 0) {
    code = 0;  // it is found

    atomic_sub_fetch_32(&pNode->refCount, 1);
    atomic_sub_fetch_16(&pNode->refCount, 1);
    pNode->removed = 1;
    if (pNode->refCount <= 0) {
      if (prevNode == NULL) {
@@ -755,7 +755,7 @@ static void *taosHashReleaseNode(SHashObj *pHashObj, void *p, int *slot) {
    pNode = pNode->next;
  }

  atomic_sub_fetch_32(&pOld->refCount, 1);
  atomic_sub_fetch_16(&pOld->refCount, 1);
  if (pOld->refCount <=0) {
    if (prevNode) {
      prevNode->next = pOld->next;
@@ -132,7 +132,7 @@ static timer_map_t timerMap;
static uintptr_t getNextTimerId() {
  uintptr_t id;
  do {
    id = atomic_add_fetch_ptr(&nextTimerId, 1);
    id = (uintptr_t)atomic_add_fetch_ptr((void **)&nextTimerId, 1);
  } while (id == 0);
  return id;
}
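getNextTimerId keeps handing out ids from a single pointer-sized counter; with the redefined API the pointer variant operates on a void* slot and returns void*, so the integer id is cast on the way in and on the way out, and the loop retries whenever the result is 0 (0 is apparently treated as an invalid id). A stripped-down sketch of the same calling pattern (demoNextId and demoCounter are illustrative, not the real timer code):

#include <stdint.h>

static uintptr_t demoCounter = 0;

static uintptr_t demoNextId(void) {
  uintptr_t id;
  do {
    // pointer-width atomic increment: cast the slot to void ** going in,
    // and cast the returned void * back to an integer id
    id = (uintptr_t)atomic_add_fetch_ptr((void **)&demoCounter, 1);
  } while (id == 0);  // retry until a non-zero id comes back
  return id;
}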