diff --git a/Include/auROXTL/auAtomic.hpp b/Include/auROXTL/auAtomic.hpp
index c96df69..ad63164 100644
--- a/Include/auROXTL/auAtomic.hpp
+++ b/Include/auROXTL/auAtomic.hpp
@@ -11,6 +11,27 @@
 
 #include <intrin.h>
 #endif
 
+// Defines:
+// AuAtomicCompareExchange (no weak variants yet)
+// -
+// AuAtomicTestAndSet
+// AuAtomicClearU8Lock
+// AuAtomicSet
+// AuAtomicUnset (ret: bool)
+// AuAtomicAndUnsetBit (ret: T)
+// -
+// AuAtomicLoadWeak (tbd)
+// (no weak store) (tbd)
+// -
+// AuAtomicLoad
+// AuAtomicStore
+// -
+// AuAtomicOrSetBit
+// AuAtomicOr
+// AuAtomicAnd
+// AuAtomicAdd
+// AuAtomicSub
+
 template <class T>
 struct AuAtomicUtils
@@ -77,6 +98,9 @@ struct AuAtomicUtils
 
     //
     static void ClearU8Lock(T *in);
+
+    // relaxed-intent load; currently forwards to Load (acquire) — see LoadWeak impl
+    static T LoadWeak(T *in);
 };
 
 #if defined(AURORA_COMPILER_MSVC)
@@ -340,6 +364,12 @@ inline auline T AuAtomicUtils<T>::And(T *in, T value)
 
 #endif
 
+template <class T>
+inline auline T AuAtomicUtils<T>::LoadWeak(T *in)
+{
+    return AuAtomicUtils<T>::Load(in);
+}
+
 #if defined(AURORA_COMPILER_CLANG)
 #define ATOMIC_PREFIX_HAX(name) __c11_ ## name
 #else
@@ -385,6 +415,14 @@ inline auline T AuAtomicUtils<T>::Load(T *in)
     {
         return ATOMIC_PREFIX_HAX(atomic_load)((atomic_int_fast32_t *)in, __ATOMIC_ACQUIRE);
     }
+    else if constexpr (AuIsSame_v<T, AuUInt64>)
+    {
+        return ATOMIC_PREFIX_HAX(atomic_load)((atomic_uint_fast64_t *)in, __ATOMIC_ACQUIRE);
+    }
+    else if constexpr (AuIsSame_v<T, AuInt64>)
+    {
+        return ATOMIC_PREFIX_HAX(atomic_load)((atomic_int_fast64_t *)in, __ATOMIC_ACQUIRE);
+    }
     else
     {
         static_assert(AuIsVoid_v<T>, "T");
@@ -401,8 +439,47 @@ inline auline void AuAtomicUtils<T>::Store(T *in, T val)
 #if defined(AURORA_COMPILER_MSVC) && (defined(AURORA_ARCH_X64) || defined(AURORA_ARCH_X86))
     *in = val;
 #elif defined(AURORA_COMPILER_MSVC)
-    *in = val;
+#if 0 // old path: plain store + full fence; replaced by interlocked stores below
+    * in = val;
     ::MemoryBarrier();
+#else
+    if constexpr (AuIsSame_v<T, bool> || AuIsSame_v<T, AuUInt8>) // bool folded in: previous code stored any T here
+    {
+        ::InterlockedExchange8((CHAR volatile *)in, (CHAR)val);
+    }
+    else if constexpr (AuIsSame_v<T, AuInt8>)
+    {
+        ::InterlockedExchange8((CHAR volatile *)in, (CHAR)val);
+    }
+    else if constexpr (AuIsSame_v<T, AuUInt16>)
+    {
+        ::InterlockedExchange16((SHORT volatile *)in, (SHORT)val);
+    }
+    else if constexpr (AuIsSame_v<T, AuInt16>)
+    {
+        ::InterlockedExchange16((SHORT volatile *)in, (SHORT)val);
+    }
+    else if constexpr (AuIsSame_v<T, AuUInt32>)
+    {
+        ::InterlockedExchange((LONG volatile *)in, (LONG)val); // note: InterlockedExchange32 does not exist; 32-bit is InterlockedExchange
+    }
+    else if constexpr (AuIsSame_v<T, AuInt32>)
+    {
+        ::InterlockedExchange((LONG volatile *)in, (LONG)val);
+    }
+    else if constexpr (AuIsSame_v<T, AuUInt64>)
+    {
+        ::InterlockedExchange64((LONG64 volatile *)in, (LONG64)val);
+    }
+    else if constexpr (AuIsSame_v<T, AuInt64>)
+    {
+        ::InterlockedExchange64((LONG64 volatile *)in, (LONG64)val);
+    }
+    else
+    {
+        static_assert(AuIsVoid_v<T>, "T");
+    }
+#endif
 #else
     if constexpr (AuIsSame_v<T, bool>)
     {
@@ -428,6 +505,14 @@ inline auline void AuAtomicUtils<T>::Store(T *in, T val)
     {
         ATOMIC_PREFIX_HAX(atomic_store)((atomic_int_fast32_t *)in, val, __ATOMIC_RELEASE);
     }
+    else if constexpr (AuIsSame_v<T, AuUInt64>)
+    {
+        ATOMIC_PREFIX_HAX(atomic_store)((atomic_uint_fast64_t *)in, val, __ATOMIC_RELEASE);
+    }
+    else if constexpr (AuIsSame_v<T, AuInt64>)
+    {
+        ATOMIC_PREFIX_HAX(atomic_store)((atomic_int_fast64_t *)in, val, __ATOMIC_RELEASE);
+    }
     else
     {
         static_assert(AuIsVoid_v<T>, "T");
@@ -529,6 +614,13 @@ T AuAtomicLoad(T *in)
     return AuAtomicUtils<T>::Load(in);
 }
 
+template <class T>
+auline
+T AuAtomicLoadWeak(T *in)
+{
+    return AuAtomicUtils<T>::LoadWeak(in);
+}
+
 template <class T>
 auline
 void AuAtomicClearU8Lock(T *in)
@@ -621,6 +713,13 @@ T AuAtomicLoad(volatile T *in)
     return AuAtomicUtils<T>::Load((T *)in);
 }
 
+template <class T>
+auline
+T AuAtomicLoadWeak(volatile T *in)
+{
+    return AuAtomicUtils<T>::LoadWeak((T *)in);
+}
+
 template <class T>
 auline
 void AuAtomicClearU8Lock(volatile T *in)