author     Mitya Selivanov <automainint@guattari.tech>   2023-12-29 06:21:33 +0100
committer  Mitya Selivanov <automainint@guattari.tech>   2023-12-29 06:21:33 +0100
commit     2d6c8fec45b23a8a28668ecf3ef281139ab778a7 (patch)
tree       75d2a8538992129a83c0c2b83688289443d697e5 /source/kit/atomic.h
parent     820b171245f2f14766f3accdb0246a4e2c0d596a (diff)
download   saw-2d6c8fec45b23a8a28668ecf3ef281139ab778a7.zip
refactor dependencies; include dependencies source code
Diffstat (limited to 'source/kit/atomic.h')
-rw-r--r--  source/kit/atomic.h | 221
1 file changed, 221 insertions(+), 0 deletions(-)
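
The new header is a compatibility shim for C11 atomics: on toolchains that
provide <stdatomic.h> it simply includes it, while under MSVC (where that
header may be unavailable in C mode) it declares sized kit_* helper
functions and macros that reproduce the standard atomic_* interface.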
diff --git a/source/kit/atomic.h b/source/kit/atomic.h
new file mode 100644
index 0000000..a91cb4c
--- /dev/null
+++ b/source/kit/atomic.h
@@ -0,0 +1,221 @@
+#ifndef KIT_ATOMIC_H
+#define KIT_ATOMIC_H
+
+#include "types.h"
+
+#ifndef _MSC_VER
+# include <stdatomic.h>
+#else
+# include <assert.h>
+
+# ifdef __cplusplus
+extern "C" {
+# endif
+
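+/* MSVC has no _Atomic qualifier: map it to volatile so that atomic
+   declarations compile unchanged; actual atomicity is provided by the
+   helper functions below. */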
+# define _Atomic volatile
+
+enum {
+ memory_order_relaxed,
+ memory_order_consume,
+ memory_order_acquire,
+ memory_order_release,
+ memory_order_acq_rel,
+ memory_order_seq_cst
+};
+
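+/* Sized helpers behind the generic macros below. Declarations only; the
+   definitions are not part of this header and presumably live in the
+   accompanying source file. */
+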
+void kit_atomic_store_explicit_8(u8 volatile *var, u8 value,
+ i32 memory_order);
+
+void kit_atomic_store_explicit_16(u16 volatile *var, u16 value,
+ i32 memory_order);
+
+void kit_atomic_store_explicit_32(u32 volatile *var, u32 value,
+ i32 memory_order);
+
+void kit_atomic_store_explicit_64(u64 volatile *var, u64 value,
+ i32 memory_order);
+
+u8 kit_atomic_load_explicit_8(u8 volatile *var, i32 memory_order);
+
+u16 kit_atomic_load_explicit_16(u16 volatile *var, i32 memory_order);
+
+u32 kit_atomic_load_explicit_32(u32 volatile *var, i32 memory_order);
+
+u64 kit_atomic_load_explicit_64(u64 volatile *var, i32 memory_order);
+
+u8 kit_atomic_exchange_explicit_8(u8 volatile *var, u8 value,
+ i32 memory_order);
+
+u16 kit_atomic_exchange_explicit_16(u16 volatile *var, u16 value,
+ i32 memory_order);
+
+u32 kit_atomic_exchange_explicit_32(u32 volatile *var, u32 value,
+ i32 memory_order);
+
+u64 kit_atomic_exchange_explicit_64(u64 volatile *var, u64 value,
+ i32 memory_order);
+
+i32 kit_atomic_compare_exchange_explicit_8(u8 volatile *var,
+ u8 *expected, u8 value,
+ i32 memory_order_succ_,
+ i32 memory_order_fail_);
+
+i32 kit_atomic_compare_exchange_explicit_16(u16 volatile *var,
+ u16 *expected, u16 value,
+ i32 memory_order_succ_,
+ i32 memory_order_fail_);
+
+i32 kit_atomic_compare_exchange_explicit_32(u32 volatile *var,
+ u32 *expected, u32 value,
+ i32 memory_order_succ_,
+ i32 memory_order_fail_);
+
+i32 kit_atomic_compare_exchange_explicit_64(u64 volatile *var,
+ u64 *expected, u64 value,
+ i32 memory_order_succ_,
+ i32 memory_order_fail_);
+
+u8 kit_atomic_fetch_add_explicit_8(u8 volatile *var, u8 value,
+ i32 memory_order);
+
+u16 kit_atomic_fetch_add_explicit_16(u16 volatile *var, u16 value,
+ i32 memory_order);
+
+u32 kit_atomic_fetch_add_explicit_32(u32 volatile *var, u32 value,
+ i32 memory_order);
+
+u64 kit_atomic_fetch_add_explicit_64(u64 volatile *var, u64 value,
+ i32 memory_order);
+
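+/* Generic front-ends: each macro dispatches on sizeof *var to the matching
+   sized helper. The assert rejects unsupported operand sizes at run time
+   (in debug builds). */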
+# define atomic_store_explicit(var_, value_, memory_order_) \
+ do { \
+ assert(sizeof *(var_) == 1 || sizeof *(var_) == 2 || \
+ sizeof *(var_) == 4 || sizeof *(var_) == 8); \
+ if (sizeof *(var_) == 1) \
+ kit_atomic_store_explicit_8((u8 volatile *) (var_), \
+ (u8) (value_), (memory_order_)); \
+ if (sizeof *(var_) == 2) \
+ kit_atomic_store_explicit_16((u16 volatile *) (var_), \
+ (u16) (value_), \
+ (memory_order_)); \
+ if (sizeof *(var_) == 4) \
+ kit_atomic_store_explicit_32((u32 volatile *) (var_), \
+ (u32) (value_), \
+ (memory_order_)); \
+ if (sizeof *(var_) == 8) \
+ kit_atomic_store_explicit_64((u64 volatile *) (var_), \
+ (u64) (value_), \
+ (memory_order_)); \
+ } while (0)
+
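+/* Unlike the statement-style store above (atomic_store_explicit returns
+   void in C11), the value-returning macros below are written as
+   comma/ternary expressions so they can appear wherever a value is
+   expected. */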
+# define atomic_load_explicit(var_, memory_order_) \
+ (assert(sizeof *(var_) == 1 || sizeof *(var_) == 2 || \
+ sizeof *(var_) == 4 || sizeof *(var_) == 8), \
+ (sizeof *(var_) == 1 \
+ ? kit_atomic_load_explicit_8((u8 volatile *) (var_), \
+ (memory_order_)) \
+ : sizeof *(var_) == 2 \
+ ? kit_atomic_load_explicit_16((u16 volatile *) (var_), \
+ (memory_order_)) \
+ : sizeof *(var_) == 4 \
+ ? kit_atomic_load_explicit_32((u32 volatile *) (var_), \
+ (memory_order_)) \
+ : kit_atomic_load_explicit_64((u64 volatile *) (var_), \
+ (memory_order_))))
+
+# define atomic_exchange_explicit(var_, value_, memory_order_) \
+ (assert(sizeof *(var_) == 1 || sizeof *(var_) == 2 || \
+ sizeof *(var_) == 4 || sizeof *(var_) == 8), \
+ (sizeof *(var_) == 1 ? kit_atomic_exchange_explicit_8( \
+ (u8 volatile *) (var_), \
+ (u8) (value_), (memory_order_)) \
+ : sizeof *(var_) == 2 ? kit_atomic_exchange_explicit_16( \
+ (u16 volatile *) (var_), \
+ (u16) (value_), (memory_order_)) \
+ : sizeof *(var_) == 4 \
+ ? kit_atomic_exchange_explicit_32((u32 volatile *) (var_), \
+ (u32) (value_), \
+ (memory_order_)) \
+ : kit_atomic_exchange_explicit_64((u64 volatile *) (var_), \
+ (u64) (value_), \
+ (memory_order_))))
+
+# define atomic_compare_exchange_strong_explicit( \
+ var_, expected_, value_, memory_order_succ_, \
+ memory_order_fail_) \
+ (assert(sizeof *(var_) == 1 || sizeof *(var_) == 2 || \
+ sizeof *(var_) == 4 || sizeof *(var_) == 8), \
+ (sizeof *(var_) == 1 \
+ ? kit_atomic_compare_exchange_explicit_8( \
+ (u8 volatile *) (var_), (u8 *) (expected_), \
+ (u8) (value_), (memory_order_succ_), \
+ (memory_order_fail_)) \
+ : sizeof *(var_) == 2 \
+ ? kit_atomic_compare_exchange_explicit_16( \
+ (u16 volatile *) (var_), (u16 *) (expected_), \
+ (u16) (value_), (memory_order_succ_), \
+ (memory_order_fail_)) \
+ : sizeof *(var_) == 4 \
+ ? kit_atomic_compare_exchange_explicit_32( \
+ (u32 volatile *) (var_), (u32 *) (expected_), \
+ (u32) (value_), (memory_order_succ_), \
+ (memory_order_fail_)) \
+ : kit_atomic_compare_exchange_explicit_64( \
+ (u64 volatile *) (var_), (u64 *) (expected_), \
+ (u64) (value_), (memory_order_succ_), \
+ (memory_order_fail_))))
+
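+/* A strong CAS also satisfies the weak form's contract (weak is merely
+   permitted to fail spuriously), so the weak variant reuses it. */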
+# define atomic_compare_exchange_weak_explicit( \
+ var_, expected_, value_, memory_order_succ_, \
+ memory_order_fail_) \
+ atomic_compare_exchange_strong_explicit(var_, expected_, value_, \
+ memory_order_succ_, \
+ memory_order_fail_)
+
+# define atomic_fetch_add_explicit(var_, value_, memory_order_) \
+ (assert(sizeof *(var_) == 1 || sizeof *(var_) == 2 || \
+ sizeof *(var_) == 4 || sizeof *(var_) == 8), \
+ (sizeof *(var_) == 1 ? kit_atomic_fetch_add_explicit_8( \
+ (u8 volatile *) (var_), \
+ (u8) (value_), (memory_order_)) \
+ : sizeof *(var_) == 2 ? kit_atomic_fetch_add_explicit_16( \
+ (u16 volatile *) (var_), \
+ (u16) (value_), (memory_order_)) \
+ : sizeof *(var_) == 4 ? kit_atomic_fetch_add_explicit_32( \
+ (u32 volatile *) (var_), \
+ (u32) (value_), (memory_order_)) \
+ : kit_atomic_fetch_add_explicit_64( \
+ (u64 volatile *) (var_), \
+ (u64) (value_), (memory_order_))))
+
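+/* Implicit-order convenience forms default to sequential consistency,
+   matching C11. */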
+# define atomic_store(var_, value_) \
+    atomic_store_explicit(var_, value_, memory_order_seq_cst)
+
+# define atomic_load(var_) \
+    atomic_load_explicit(var_, memory_order_seq_cst)
+
+# define atomic_exchange(var_, value_) \
+    atomic_exchange_explicit(var_, value_, memory_order_seq_cst)
+
+# define atomic_compare_exchange_strong(var_, expected_, value_) \
+ atomic_compare_exchange_strong_explicit(var_, expected_, value_, \
+ memory_order_seq_cst, \
+ memory_order_seq_cst)
+
+# define atomic_compare_exchange_weak(var_, expected_, value_) \
+ atomic_compare_exchange_weak_explicit(var_, expected_, value_, \
+ memory_order_seq_cst, \
+ memory_order_seq_cst)
+
+# define atomic_fetch_add(var_, value_) \
+    atomic_fetch_add_explicit(var_, value_, memory_order_seq_cst)
+
+# ifdef __cplusplus
+}
+# endif
+#endif
+
+#ifndef KIT_DISABLE_SHORT_NAMES
+# define ATOMIC KIT_ATOMIC
+#endif
+
+#endif
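
A minimal usage sketch (illustrative, not part of the commit: the include
path, the counter variable, and bump() are assumptions, as is u32 coming
from types.h). The same C11-style call compiles via <stdatomic.h> on
conforming toolchains and via the sizeof dispatch above under MSVC:

    #include "kit/atomic.h"

    static u32 _Atomic counter; /* reads as `static u32 volatile` under MSVC */

    void bump(void) {
      /* under MSVC this selects kit_atomic_fetch_add_explicit_32;
         elsewhere it is the standard stdatomic.h operation */
      atomic_fetch_add_explicit(&counter, 1, memory_order_relaxed);
    }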