#ifndef NE_STL_INCLUDENE_STL_ATOMIC_H
#define NE_STL_INCLUDENE_STL_ATOMIC_H
#include <atomic>
#include <cstdint>

#if defined(NE_STL_USE_CLANG) || defined(NE_STL_USE_GCC)
#define NE_STL_ATOMIC_USE_GCC_INTRINSICS
#elif defined(NE_STL_USE_MSVC)
#define NE_STL_ATOMIC_USE_MSVC_INTRINSICS
#include <intrin.h>
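
// Backend selection: GCC/Clang builds use the __atomic builtins, while MSVC
// builds go through the _Interlocked* intrinsics wrapped in msvc_atomic below.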

namespace msvc_atomic {

template <typename T, std::size_t N = sizeof(T)>
struct interlocked;
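
// interlocked<T, N> dispatches on sizeof(T); each specialization implements
// increment/decrement/compare_exchange/exchange on top of the MSVC intrinsic
// of matching width. There is no 8-bit increment/decrement intrinsic, so the
// 1-byte specialization emulates them with compare-exchange loops.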
template <typename T>
struct interlocked<T, 1> {
  static inline T increment(T volatile* x) {
    int8_t old_val, new_val;
    do {
      old_val = static_cast<int8_t>(*x);
      new_val = old_val + static_cast<int8_t>(1);
    } while (_InterlockedCompareExchange8(reinterpret_cast<volatile int8_t*>(x),
                                          new_val, old_val) != old_val);
    return static_cast<T>(new_val);
  }

  static inline T decrement(T volatile* x) {
    int8_t old_val, new_val;
    do {
      old_val = static_cast<int8_t>(*x);
      new_val = old_val - static_cast<int8_t>(1);
    } while (_InterlockedCompareExchange8(reinterpret_cast<volatile int8_t*>(x),
                                          new_val, old_val) != old_val);
    return static_cast<T>(new_val);
  }

  static inline T compare_exchange(T volatile* x, const T new_val,
                                   const T expected_val) {
    return static_cast<T>(_InterlockedCompareExchange8(
        reinterpret_cast<volatile int8_t*>(x),
        static_cast<const int8_t>(new_val),
        static_cast<const int8_t>(expected_val)));
  }

  static inline T exchange(T volatile* x, const T new_val) {
    return static_cast<T>(_InterlockedExchange8(
        reinterpret_cast<volatile char*>(x), static_cast<const char>(new_val)));
  }
};
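
// The 16-bit specialization maps each operation directly onto the
// corresponding _Interlocked*16 intrinsic.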
template <typename T>
struct interlocked<T, 2> {
  static inline T increment(T volatile* x) {
    return static_cast<T>(
        _InterlockedIncrement16(reinterpret_cast<volatile int16_t*>(x)));
  }

  static inline T decrement(T volatile* x) {
    return static_cast<T>(
        _InterlockedDecrement16(reinterpret_cast<volatile int16_t*>(x)));
  }

  static inline T compare_exchange(T volatile* x, const T new_val,
                                   const T expected_val) {
    return static_cast<T>(_InterlockedCompareExchange16(
        reinterpret_cast<volatile int16_t*>(x),
        static_cast<const int16_t>(new_val),
        static_cast<const int16_t>(expected_val)));
  }

  static inline T exchange(T volatile* x, const T new_val) {
    return static_cast<T>(_InterlockedExchange16(
        reinterpret_cast<volatile int16_t*>(x),
        static_cast<const int16_t>(new_val)));
  }
};
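
// Note that long is 32 bits wide on Windows, so the long-based intrinsics
// serve the 4-byte specialization.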
template <typename T>
struct interlocked<T, 4> {
  static inline T increment(T volatile* x) {
    return static_cast<T>(
        _InterlockedIncrement(reinterpret_cast<volatile long*>(x)));
  }

  static inline T decrement(T volatile* x) {
    return static_cast<T>(
        _InterlockedDecrement(reinterpret_cast<volatile long*>(x)));
  }

  static inline T compare_exchange(T volatile* x, const T new_val,
                                   const T expected_val) {
    return static_cast<T>(_InterlockedCompareExchange(
        reinterpret_cast<volatile long*>(x), static_cast<const long>(new_val),
        static_cast<const long>(expected_val)));
  }

  static inline T exchange(T volatile* x, const T new_val) {
    // Type-pun the value through its address so the raw 4-byte bit pattern is
    // exchanged rather than going through a numeric conversion.
    long ret = _InterlockedExchange(reinterpret_cast<volatile long*>(x),
                                    *((long*)((void*)(&new_val))));
    return *(T*)((void*)(&ret));
  }
};
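
// The 8-byte specialization can use the 64-bit intrinsics directly only when
// NE_STL_ARC_HCPU_64_BITS is defined; otherwise increment/decrement/exchange
// are emulated with _InterlockedCompareExchange64 loops.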
template <typename T>
struct interlocked<T, 8> {
  static inline T increment(T volatile* x) {
#if defined(NE_STL_ARC_HCPU_64_BITS)
    return static_cast<T>(
        _InterlockedIncrement64(reinterpret_cast<volatile int64_t*>(x)));
#else
    int64_t old_val, new_val;
    do {
      old_val = static_cast<int64_t>(*x);
      new_val = old_val + static_cast<int64_t>(1);
    } while (_InterlockedCompareExchange64(
                 reinterpret_cast<volatile int64_t*>(x), new_val, old_val) !=
             old_val);
    return static_cast<T>(new_val);
#endif
  }

  static inline T decrement(T volatile* x) {
#if defined(NE_STL_ARC_HCPU_64_BITS)
    return static_cast<T>(
        _InterlockedDecrement64(reinterpret_cast<volatile int64_t*>(x)));
#else
    int64_t old_val, new_val;
    do {
      old_val = static_cast<int64_t>(*x);
      new_val = old_val - static_cast<int64_t>(1);
    } while (_InterlockedCompareExchange64(
                 reinterpret_cast<volatile int64_t*>(x), new_val, old_val) !=
             old_val);
    return static_cast<T>(new_val);
#endif
  }

  static inline T compare_exchange(T volatile* x, const T new_val,
                                   const T expected_val) {
    return reinterpret_cast<T>(_InterlockedCompareExchange64(
        reinterpret_cast<volatile int64_t*>(x),
        reinterpret_cast<const int64_t>(new_val),
        reinterpret_cast<const int64_t>(expected_val)));
  }

  static inline T exchange(T volatile* x, const T new_val) {
#if defined(NE_STL_ARC_HCPU_64_BITS)
    return reinterpret_cast<T>(_InterlockedExchange64(
        reinterpret_cast<volatile int64_t*>(x),
        reinterpret_cast<const int64_t>(new_val)));
#else
    int64_t old_val;
    do {
      old_val = static_cast<int64_t>(*x);
    } while (_InterlockedCompareExchange64(
                 reinterpret_cast<volatile int64_t*>(x), new_val, old_val) !=
             old_val);
    return static_cast<T>(old_val);
#endif
  }
};

}  // namespace msvc_atomic
#else
#error Unsupported compiler / system.
#endif
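
// nstd::atomic<T>: a minimal atomic wrapper exposing load/store/exchange,
// compare_exchange and pre-increment/decrement with sequentially consistent
// ordering, dispatched to the backend selected above (or std::atomic otherwise).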
namespace nstd {

template <typename T>
class atomic {
 public:
  static_assert(sizeof(T) == 1 || sizeof(T) == 2 || sizeof(T) == 4 ||
                    sizeof(T) == 8,
                "Only types of size 1, 2, 4 or 8 are supported");

  atomic() : value_(static_cast<T>(0)) {}

  atomic(const T value) : value_(value) {}

  atomic& operator=(const atomic&) = delete;

  T operator++() {
#if defined(NE_STL_ATOMIC_USE_GCC_INTRINSICS)
    return __atomic_add_fetch(&value_, 1, __ATOMIC_SEQ_CST);
#elif defined(NE_STL_ATOMIC_USE_MSVC_INTRINSICS)
    return msvc_atomic::interlocked<T>::increment(&value_);
#else
    return ++value_;
#endif
  }

  T operator--() {
#if defined(NE_STL_ATOMIC_USE_GCC_INTRINSICS)
    return __atomic_sub_fetch(&value_, 1, __ATOMIC_SEQ_CST);
#elif defined(NE_STL_ATOMIC_USE_MSVC_INTRINSICS)
    return msvc_atomic::interlocked<T>::decrement(&value_);
#else
    return --value_;
#endif
  }

  bool compare_exchange(const T expected_val, const T new_val) {
#if defined(NE_STL_ATOMIC_USE_GCC_INTRINSICS)
    T e = expected_val;
    return __atomic_compare_exchange_n(&value_, &e, new_val, true,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
#elif defined(NE_STL_ATOMIC_USE_MSVC_INTRINSICS)
    const T old_val = msvc_atomic::interlocked<T>::compare_exchange(
        &value_, new_val, expected_val);
    return (old_val == expected_val);
#else
    T e = expected_val;
    return value_.compare_exchange_weak(e, new_val);
#endif
  }
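
  // Note: compare_exchange() uses weak compare-exchange semantics on the
  // __atomic and std::atomic paths, so it may fail spuriously; call it in a
  // retry loop when that matters.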

  void store(const T new_val) {
#if defined(NE_STL_ATOMIC_USE_GCC_INTRINSICS)
    __atomic_store_n(&value_, new_val, __ATOMIC_SEQ_CST);
#elif defined(NE_STL_ATOMIC_USE_MSVC_INTRINSICS)
    // On MSVC the store is an exchange whose result is discarded, which also
    // acts as a full memory barrier.
    (void)msvc_atomic::interlocked<T>::exchange(&value_, new_val);
#else
    value_.store(new_val);
#endif
  }

  T load() const {
#if defined(NE_STL_ATOMIC_USE_GCC_INTRINSICS)
    return __atomic_load_n(&value_, __ATOMIC_SEQ_CST);
#elif defined(NE_STL_ATOMIC_USE_MSVC_INTRINSICS)
    return value_;
#else
    return value_.load();
#endif
  }

  T exchange(const T new_val) {
#if defined(NE_STL_ATOMIC_USE_GCC_INTRINSICS)
    return __atomic_exchange_n(&value_, new_val, __ATOMIC_SEQ_CST);
#elif defined(NE_STL_ATOMIC_USE_MSVC_INTRINSICS)
    return msvc_atomic::interlocked<T>::exchange(&value_, new_val);
#else
    return value_.exchange(new_val);
#endif
  }

  T operator=(const T new_value) {
    store(new_value);
    return new_value;
  }

  operator const T() const { return load(); }

 private:
#if defined(NE_STL_ATOMIC_USE_GCC_INTRINSICS) || defined(NE_STL_ATOMIC_USE_MSVC_INTRINSICS)
  volatile T value_;
#else
  std::atomic<T> value_;
#endif
};
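
// Illustrative usage (hypothetical caller code, not part of this header):
//
//   nstd::atomic<int32_t> counter(0);
//   ++counter;                                         // atomic increment
//   counter.store(10);                                 // seq_cst store
//   int32_t v = counter.load();                        // seq_cst load
//   bool swapped = counter.compare_exchange(10, 20);   // CAS 10 -> 20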

typedef atomic<bool> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<int8_t> atomic_schar;
typedef atomic<uint8_t> atomic_uchar;
typedef atomic<int16_t> atomic_short;
typedef atomic<uint16_t> atomic_ushort;
typedef atomic<int32_t> atomic_int;
typedef atomic<uint32_t> atomic_uint;
typedef atomic<int32_t> atomic_long;
typedef atomic<uint32_t> atomic_ulong;
typedef atomic<int64_t> atomic_llong;
typedef atomic<uint64_t> atomic_ullong;

}  // namespace nstd

#endif  // NE_STL_INCLUDENE_STL_ATOMIC_H