Compare commits

...

4 Commits

Author SHA1 Message Date
Ondřej Surý
461b4e1561 Add ATOMIC_VAR_INIT initializer to mutexatomics.h 2019-09-18 10:15:44 +02:00
Ondřej Surý
0e2572faeb Add atomic_fetch_add and atomic_fetch_or convenience macros and unix and win32 shims 2019-09-18 10:15:38 +02:00
Ondřej Surý
4771bc57b1 Add mutexatomic.h atomic_int and atomic_uint types 2019-09-18 10:12:52 +02:00
Ondřej Surý
7ccf2e0366 Add atomic_int and atomic_uint in unix and win32 stdatomic.h shims 2019-09-18 09:53:17 +02:00
4 changed files with 126 additions and 5 deletions

View File

@@ -35,6 +35,10 @@
atomic_fetch_add_explicit((o), (v), memory_order_relaxed)
#define atomic_fetch_sub_relaxed(o, v) \
atomic_fetch_sub_explicit((o), (v), memory_order_relaxed)
#define atomic_fetch_or_relaxed(o, v) \
atomic_fetch_or_explicit((o), (v), memory_order_relaxed)
#define atomic_fetch_and_relaxed(o, v) \
atomic_fetch_and_explicit((o), (v), memory_order_relaxed)
#define atomic_exchange_relaxed(o, v) \
atomic_exchange_explicit((o), (v), memory_order_relaxed)
#define atomic_compare_exchange_weak_relaxed(o, e, d) \

View File

@@ -73,6 +73,16 @@ enum memory_order {
typedef enum memory_order memory_order;
/*
 * Mutex-protected shims for C11 atomic_int / atomic_uint: each value is
 * paired with an isc_mutex_t guarding every access.
 * Fix: struct tag was misspelled "automic_int"; renamed to match the
 * typedef name and the tag convention of the sibling types below
 * (atomic_uint, atomic_int_fast32, ...). The typedef name callers use
 * is unchanged.
 */
typedef struct atomic_int {
	isc_mutex_t m;
	int v;
} atomic_int;
typedef struct atomic_uint {
	isc_mutex_t m;
	unsigned int v;
} atomic_uint;
typedef struct atomic_int_fast32 {
isc_mutex_t m;
int32_t v;
@@ -93,12 +103,13 @@ typedef struct atomic_uint_fast64 {
uint64_t v;
} atomic_uint_fast64_t;
typedef struct atomic_bool_s {
typedef struct atomic_bool {
isc_mutex_t m;
bool v;
} atomic_bool;
#define ATOMIC_VAR_INIT(arg) \
{ .m = PTHREAD_MUTEX_INITIALIZER, .v = arg }
#define atomic_init(obj, desired) \
{ \

View File

@@ -74,6 +74,8 @@ enum memory_order {
typedef enum memory_order memory_order;
typedef int atomic_int;
typedef unsigned int atomic_uint;
typedef int_fast32_t atomic_int_fast32_t;
typedef uint_fast32_t atomic_uint_fast32_t;
typedef int_fast64_t atomic_int_fast64_t;
@@ -106,6 +108,10 @@ typedef bool atomic_bool;
__atomic_fetch_add(obj, arg, order)
#define atomic_fetch_sub_explicit(obj, arg, order) \
__atomic_fetch_sub(obj, arg, order)
#define atomic_fetch_and_explicit(obj, arg, order) \
__atomic_fetch_and(obj, arg, order)
#define atomic_fetch_or_explicit(obj, arg, order) \
__atomic_fetch_or(obj, arg, order)
#define atomic_compare_exchange_strong_explicit(obj, expected, desired, succ, fail) \
__atomic_compare_exchange_n(obj, expected, desired, 0, succ, fail)
#define atomic_compare_exchange_weak_explicit(obj, expected, desired, succ, fail) \
@@ -121,10 +127,14 @@ typedef bool atomic_bool;
*obj = desired; \
__sync_synchronize(); \
} while (0);
#define atomic_fetch_add_explicit(obj, arg, order) \
#define atomic_fetch_add_explicit(obj, arg, order) \
__sync_fetch_and_add(obj, arg)
#define atomic_fetch_sub_explicit(obj, arg, order) \
#define atomic_fetch_sub_explicit(obj, arg, order) \
__sync_fetch_and_sub(obj, arg, order)
#define atomic_fetch_and_explicit(obj, arg, order) \
__sync_fetch_and_and(obj, arg, order)
#define atomic_fetch_or_explicit(obj, arg, order) \
__sync_fetch_and_or(obj, arg, order)
#define atomic_compare_exchange_strong_explicit(obj, expected, desired, succ, fail) \
({ \
__typeof__(obj) __v; \
@@ -148,6 +158,10 @@ typedef bool atomic_bool;
atomic_fetch_add_explicit(obj, arg, memory_order_seq_cst)
#define atomic_fetch_sub(obj, arg) \
atomic_fetch_sub_explicit(obj, arg, memory_order_seq_cst)
#define atomic_fetch_and(obj, arg) \
atomic_fetch_and_explicit(obj, arg, memory_order_seq_cst)
#define atomic_fetch_or(obj, arg) \
atomic_fetch_or_explicit(obj, arg, memory_order_seq_cst)
#define atomic_compare_exchange_strong(obj, expected, desired) \
atomic_compare_exchange_strong_explicit(obj, expected, desired, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_compare_exchange_weak(obj, expected, desired) \

View File

@@ -64,6 +64,8 @@ typedef enum memory_order memory_order;
*/
typedef bool volatile atomic_bool;
typedef int volatile atomic_int;
typedef unsigned int volatile atomic_uint;
typedef int_fast8_t volatile atomic_int_fast8_t;
typedef uint_fast8_t volatile atomic_uint_fast8_t;
typedef int_fast32_t volatile atomic_int_fast32_t;
@@ -187,7 +189,7 @@ atomic_load_abort() {
: InterlockedExchangeAdd64((atomic_int_fast64_t *)obj, arg))))
#else
#define atomic_fetch_add_explicit64(obj, arg, order) \
InterlockedExchange64((atomic_int_fast64_t *)obj, arg)
InterlockedExchangeAdd64((atomic_int_fast64_t *)obj, arg)
#endif
static inline
@@ -215,6 +217,96 @@ atomic_add_abort() {
#define atomic_fetch_sub(obj, arg) \
atomic_fetch_sub_explicit(obj, arg, memory_order_seq_cst)
/*
 * 8-bit fetch-AND via Win32 InterlockedAnd8. The memory-order argument is
 * ignored (InterlockedAnd8 is a full barrier).
 * Fix: the cast was "(atomic_int_fast8_t)obj" — converting the POINTER to
 * an integer — instead of a pointer cast, matching the 32/64-bit variants.
 */
#define atomic_fetch_and_explicit8(obj, arg, order) \
	InterlockedAnd8((atomic_int_fast8_t *)obj, arg)
#define atomic_fetch_and_explicit32(obj, arg, order) \
(order == memory_order_relaxed \
? InterlockedAndNoFence((atomic_int_fast32_t *)obj, arg) \
: (order == memory_order_acquire \
? InterlockedAndAcquire((atomic_int_fast32_t *)obj, arg) \
: (order == memory_order_release \
? InterlockedAndRelease((atomic_int_fast32_t *)obj, arg) \
: InterlockedAnd((atomic_int_fast32_t *)obj, arg))))
#ifdef _WIN64
#define atomic_fetch_and_explicit64(obj, arg, order) \
(order == memory_order_relaxed \
? InterlockedAnd64NoFence((atomic_int_fast64_t *)obj, arg) \
: (order == memory_order_acquire \
? InterlockedAnd64Acquire((atomic_int_fast64_t *)obj, arg) \
: (order == memory_order_release \
? InterlockedAnd64Release((atomic_int_fast64_t *)obj, arg) \
: InterlockedAnd64((atomic_int_fast64_t *)obj, arg))))
#else
#define atomic_fetch_and_explicit64(obj, arg, order) \
InterlockedAnd64((atomic_int_fast64_t *)obj, arg)
#endif
/*
 * Runtime trap reached when atomic_fetch_and_explicit() is applied to an
 * operand whose size is not 8, 4, or 1 bytes; INSIST(0) asserts and
 * ISC_UNREACHABLE() marks the path for the compiler.  Never returns
 * normally despite the int8_t return type (needed to type-check the
 * ternary dispatch below).
 */
static inline
int8_t
atomic_and_abort() {
INSIST(0);
ISC_UNREACHABLE();
}
/*
 * Generic fetch-AND: dispatch on operand size to the 64-, 32-, or 8-bit
 * Interlocked wrapper; any other size aborts via atomic_and_abort().
 * NOTE(review): there is no 16-bit branch — presumably no 16-bit atomics
 * are used by callers; confirm before relying on it.
 */
#define atomic_fetch_and_explicit(obj, arg, order) \
(sizeof(*(obj)) == 8 \
? atomic_fetch_and_explicit64(obj, arg, order) \
: (sizeof(*(obj)) == 4 \
? atomic_fetch_and_explicit32(obj, arg, order) \
: (sizeof(*(obj)) == 1 \
? atomic_fetch_and_explicit8(obj, arg, order) \
: atomic_and_abort())))
/* C11 convenience form: seq_cst ordering. */
#define atomic_fetch_and(obj, arg) \
atomic_fetch_and_explicit(obj, arg, memory_order_seq_cst)
/*
 * 8-bit fetch-OR via Win32 InterlockedOr8. The memory-order argument is
 * ignored (InterlockedOr8 is a full barrier).
 * Fix: the cast was "(atomic_int_fast8_t)obj" — converting the POINTER to
 * an integer — instead of a pointer cast, matching the 32/64-bit variants.
 */
#define atomic_fetch_or_explicit8(obj, arg, order) \
	InterlockedOr8((atomic_int_fast8_t *)obj, arg)
#define atomic_fetch_or_explicit32(obj, arg, order) \
(order == memory_order_relaxed \
? InterlockedOrNoFence((atomic_int_fast32_t *)obj, arg) \
: (order == memory_order_acquire \
? InterlockedOrAcquire((atomic_int_fast32_t *)obj, arg) \
: (order == memory_order_release \
? InterlockedOrRelease((atomic_int_fast32_t *)obj, arg) \
: InterlockedOr((atomic_int_fast32_t *)obj, arg))))
#ifdef _WIN64
#define atomic_fetch_or_explicit64(obj, arg, order) \
(order == memory_order_relaxed \
? InterlockedOr64NoFence((atomic_int_fast64_t *)obj, arg) \
: (order == memory_order_acquire \
? InterlockedOr64Acquire((atomic_int_fast64_t *)obj, arg) \
: (order == memory_order_release \
? InterlockedOr64Release((atomic_int_fast64_t *)obj, arg) \
: InterlockedOr64((atomic_int_fast64_t *)obj, arg))))
#else
#define atomic_fetch_or_explicit64(obj, arg, order) \
InterlockedOr64((atomic_int_fast64_t *)obj, arg)
#endif
/*
 * Runtime trap reached when atomic_fetch_or_explicit() is applied to an
 * operand whose size is not 8, 4, or 1 bytes; INSIST(0) asserts and
 * ISC_UNREACHABLE() marks the path for the compiler.  Never returns
 * normally despite the int8_t return type (needed to type-check the
 * ternary dispatch below).
 */
static inline
int8_t
atomic_or_abort() {
INSIST(0);
ISC_UNREACHABLE();
}
/*
 * Generic fetch-OR: dispatch on operand size to the 64-, 32-, or 8-bit
 * Interlocked wrapper; any other size aborts via atomic_or_abort().
 * NOTE(review): there is no 16-bit branch — presumably no 16-bit atomics
 * are used by callers; confirm before relying on it.
 */
#define atomic_fetch_or_explicit(obj, arg, order) \
(sizeof(*(obj)) == 8 \
? atomic_fetch_or_explicit64(obj, arg, order) \
: (sizeof(*(obj)) == 4 \
? atomic_fetch_or_explicit32(obj, arg, order) \
: (sizeof(*(obj)) == 1 \
? atomic_fetch_or_explicit8(obj, arg, order) \
: atomic_or_abort())))
/* C11 convenience form: seq_cst ordering. */
#define atomic_fetch_or(obj, arg) \
atomic_fetch_or_explicit(obj, arg, memory_order_seq_cst)
static inline bool
atomic_compare_exchange_strong_explicit8(atomic_int_fast8_t *obj,
int8_t *expected,