AuroraRuntime/Source/Threading/Primitives/AuRWLock.cpp

960 lines
26 KiB
C++

/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuRWLock.cpp
Date: 2021-6-12
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
//#define RWLOCK_VIEW_HAS_PARENT
#include "AuRWLock.hpp"
#include "SMTYield.hpp"
namespace Aurora::Threading::Primitives
{
#if defined(RWLOCK_VIEW_HAS_PARENT)
// Alternate layout: each access view carries an explicit back-pointer to its
// owning lock.
#define ViewParent (&this->parent_)
#else
// Default layout: recover the owning lock from the view's own address by
// subtracting the offset of the read_/write_ member inside RWLockImpl.
// NOTE(review): the offsets are taken from RWLockImpl<true> even when T is
// RWLockImpl<false> — presumably both layouts place the views identically;
// confirm against the header.
#define ViewParent ((T *)(((char *)this) - (bIsReadView ? RWLockImpl<true>::kOffsetOfRead : RWLockImpl<true>::kOffsetOfWrite)))
#endif
#if defined(AURORA_COMPILER_MSVC)
// Full fence: orders the writersPending_ increment before the state_ re-read
// in LockWriteNSAbsUnlocked / UpgradeReadToWrite.
#define RWLOCK_REORDER_BARRIER() ::MemoryBarrier();
#else
// Compiled away off-MSVC. NOTE(review): presumably the atomics used on those
// toolchains already provide the needed ordering — confirm.
#define RWLOCK_REORDER_BARRIER()
#endif
// Number of TryLockWrite attempts (with a context yield between failed tries)
// before callers fall back to a blocking path.
static const auto kRWThreadWriterHardContextSwitchBias = 15;
template<bool bIsReadView, typename T>
void RWLockAccessView<bIsReadView, T>::Unlock()
{
    // Route the release to the owning lock: write views drop the exclusive
    // hold, read views drop one shared hold.
    if constexpr (!bIsReadView)
    {
        ViewParent->UnlockWrite();
    }
    else
    {
        ViewParent->UnlockRead();
    }
}
template<bool bIsReadView, typename T>
bool RWLockAccessView<bIsReadView, T>::LockAbsMS(AuUInt64 timeout)
{
    // Absolute-deadline acquire; convert milliseconds to nanoseconds once,
    // then dispatch to the reader or writer path of the owning lock.
    const auto qwDeadlineNS = AuMSToNS<AuUInt64>(timeout);
    if constexpr (bIsReadView)
    {
        return ViewParent->LockReadNSAbs(qwDeadlineNS);
    }
    else
    {
        return ViewParent->LockWriteNSAbs(qwDeadlineNS);
    }
}
template<bool bIsReadView, typename T>
bool RWLockAccessView<bIsReadView, T>::LockAbsNS(AuUInt64 timeout)
{
    // Absolute-deadline acquire in nanoseconds, forwarded verbatim.
    if constexpr (!bIsReadView)
    {
        return ViewParent->LockWriteNSAbs(timeout);
    }
    else
    {
        return ViewParent->LockReadNSAbs(timeout);
    }
}
template<bool bIsReadView, typename T>
bool RWLockAccessView<bIsReadView, T>::LockMS(AuUInt64 timeout)
{
    // Relative-timeout acquire; convert milliseconds to nanoseconds once,
    // then dispatch to the reader or writer path of the owning lock.
    const auto qwRelativeNS = AuMSToNS<AuUInt64>(timeout);
    if constexpr (bIsReadView)
    {
        return ViewParent->LockReadNS(qwRelativeNS);
    }
    else
    {
        return ViewParent->LockWriteNS(qwRelativeNS);
    }
}
template<bool bIsReadView, typename T>
bool RWLockAccessView<bIsReadView, T>::LockNS(AuUInt64 timeout)
{
    // Relative-timeout acquire in nanoseconds, forwarded verbatim.
    if constexpr (!bIsReadView)
    {
        return ViewParent->LockWriteNS(timeout);
    }
    else
    {
        return ViewParent->LockReadNS(timeout);
    }
}
template<bool bIsReadView, typename T>
bool RWLockAccessView<bIsReadView, T>::TryLock()
{
    // Non-blocking acquire attempt against the owning lock.
    if constexpr (!bIsReadView)
    {
        return ViewParent->TryLockWrite();
    }
    else
    {
        return ViewParent->TryLockRead();
    }
}
template<bool bIsWriteRecursionAllowed>
RWLockImpl<bIsWriteRecursionAllowed>::RWLockImpl()
// Only the back-pointer layout needs a member-init list; in the default
// (offset-based) layout the views are default-constructed.
#if defined(RWLOCK_VIEW_HAS_PARENT)
    : read_(*this),
      write_(*this)
#endif
// Dead branch retained from a previous condvar wiring scheme (see the
// AURWLOCK_NO_SIZE_OPTIMIZED_CONDVAR members in GetCondition()).
#if 0
    , condition_(AuUnsafeRaiiToShared(&this->mutex_)),
    conditionWriter_(AuUnsafeRaiiToShared(&this->mutex_))
#endif
{
}
/// Destructor: nothing to release manually — all members are RAII-managed,
/// so prefer the defaulted form over an empty user-written body.
template<bool bIsWriteRecursionAllowed>
RWLockImpl<bIsWriteRecursionAllowed>::~RWLockImpl() = default;
template<bool bIsWriteRecursionAllowed>
ConditionVariableInternal &RWLockImpl<bIsWriteRecursionAllowed>::GetCondition()
{
    // Reader-side condition variable accessor.
#if !defined(AURWLOCK_NO_SIZE_OPTIMIZED_CONDVAR)
    // Size-optimized build: conditionVariable_ is opaque storage reinterpreted
    // as the condvar object. NOTE(review): assumes the object was constructed
    // in that buffer elsewhere (placement-new) — confirm against the header.
    return *(ConditionVariableInternal *)this->conditionVariable_;
#else
    return this->condition_;
#endif
}
template<bool bIsWriteRecursionAllowed>
ConditionVariableInternal &RWLockImpl<bIsWriteRecursionAllowed>::GetConditionWriter()
{
    // Writer-side condition variable accessor (same storage scheme as
    // GetCondition()).
#if !defined(AURWLOCK_NO_SIZE_OPTIMIZED_CONDVAR)
    return *(ConditionVariableInternal *)this->conditionVariableWriter_;
#else
    return this->conditionWriter_;
#endif
}
template<bool bIsWriteRecursionAllowed>
AuUInt32 *RWLockImpl<bIsWriteRecursionAllowed>::GetFutexCondition()
{
    // Readers futex-wait directly on state_ itself (see LockReadNSAbs /
    // LockReadNS), so the reader "condition" is just the state word.
    return (AuUInt32 *)&this->state_;
}
template<bool bIsWriteRecursionAllowed>
AuUInt32 *RWLockImpl<bIsWriteRecursionAllowed>::GetFutexConditionWriter()
{
    // Writers wait on a separate wake-token counter (posted by
    // SignalOneWriter/SignalManyWriter, consumed in LockWriteNSAbsUnlocked).
    // NOTE(review): no '&' here, unlike GetFutexCondition — presumably
    // conditionVariableWriter_ is an array/buffer that decays to a pointer;
    // confirm against the header.
    return (AuUInt32 *)this->conditionVariableWriter_;
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::LockReadNSAbs(AuUInt64 uTimeout)
{
    // Shared (read) acquire against an absolute steady-clock deadline in
    // nanoseconds; uTimeout == 0 is forwarded as-is to the waiters (the
    // zero/untimed convention of WaitOnAddressSteady / WaitForSignal*).
    if (this->TryLockRead())
    {
        return true;
    }
    AuInt32 iCurState {};
    do
    {
        iCurState = this->state_;
        // state_ < 0 means a writer holds the lock: block until signalled.
        if (iCurState < 0)
        {
            if (gUseFutexRWLock)
            {
                // Futex path: sleep while state_ still holds the observed
                // writer-held value; false indicates deadline expiry.
                if (!WaitOnAddressSteady((const void *)&this->state_, &iCurState, sizeof(iCurState), uTimeout))
                {
                    return false;
                }
            }
            else
            {
                // Condvar path: re-check under the mutex, then convert the
                // absolute deadline into a relative wait.
                AU_LOCK_GUARD(this->mutex_);
                iCurState = this->state_;
                if (iCurState < 0)
                {
                    AuInt64 iSecondTimeout {};
                    if (uTimeout)
                    {
                        iSecondTimeout = AuInt64(uTimeout) - AuTime::SteadyClockNS();
                        if (iSecondTimeout <= 0)
                        {
                            return false;
                        }
                    }
#if defined(AURWLOCK_NO_SIZE_OPTIMIZED_CONDVAR)
                    if (!this->GetCondition().WaitForSignalNS(iSecondTimeout))
#else
                    if (!this->GetCondition().WaitForSignalNsEx(&this->mutex_, iSecondTimeout))
#endif
                    {
                        return false;
                    }
                }
            }
        }
    }
    // Publish the reader: CAS state_ up by one, retrying if another thread
    // raced us or a writer slipped in (iCurState went negative again).
    while (iCurState < 0 ||
           AuAtomicCompareExchange(&this->state_, iCurState + 1, iCurState) != iCurState);
    return true;
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::LockReadNS(AuUInt64 uTimeout)
{
    // Shared (read) acquire with a relative nanosecond timeout
    // (0 = wait forever). Mirrors LockReadNSAbs after converting the
    // relative timeout to an absolute deadline.
    //
    // Cheap single attempt first so the clock is only read on contention.
    if (this->TryLockReadNoSpin())
    {
        return true;
    }
    AuInt64 uEndTime = uTimeout ? AuTime::SteadyClockNS() + uTimeout : 0;
    // Second attempt, this time with the configured spin policy.
    if (this->TryLockRead())
    {
        return true;
    }
    AuInt32 iCurState {};
    do
    {
        iCurState = this->state_;
        // state_ < 0 means a writer holds the lock: block until signalled.
        if (iCurState < 0)
        {
            if (gUseFutexRWLock)
            {
                if (!WaitOnAddressSteady((const void *)&this->state_, &iCurState, sizeof(iCurState), uEndTime))
                {
                    return false;
                }
            }
            else
            {
                // Condvar path: re-check under the mutex, then wait with the
                // remaining time until the deadline.
                AU_LOCK_GUARD(this->mutex_);
                iCurState = this->state_;
                if (iCurState < 0)
                {
                    AuInt64 iSecondTimeout {};
                    if (uTimeout)
                    {
                        iSecondTimeout = uEndTime - AuTime::SteadyClockNS();
                        if (iSecondTimeout <= 0)
                        {
                            return false;
                        }
                    }
#if defined(AURWLOCK_NO_SIZE_OPTIMIZED_CONDVAR)
                    if (!this->GetCondition().WaitForSignalNS(iSecondTimeout))
#else
                    if (!this->GetCondition().WaitForSignalNsEx(&this->mutex_, iSecondTimeout))
#endif
                    {
                        return false;
                    }
                }
            }
        }
    }
    // Publish the reader: CAS state_ up by one, retrying on races.
    while (iCurState < 0 ||
           AuAtomicCompareExchange(&this->state_, iCurState + 1, iCurState) != iCurState);
    return true;
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::LockWriteNSAbs(AuUInt64 uTimeout)
{
    // Exclusive (write) acquire against an absolute steady-clock deadline.
    if constexpr (!bIsWriteRecursionAllowed)
    {
        // Non-recursive locks: spin/yield fast path only.
        if (TryLockWrite())
        {
            return true;
        }
    }
    else
    {
        // Recursive locks: fast path handles both re-entry by the owning
        // thread and an uncontended CAS; futex builds wrap it in the
        // configured spin helper.
        if (gUseFutexRWLock)
        {
            if (DoTryIf([=]()
            {
                return this->LockWriteNSAbsSecondPath();
            }))
            {
                return true;
            }
        }
        else
        {
            if (this->LockWriteNSAbsSecondPath())
            {
                return true;
            }
        }
    }
    // Slow path: block until the lock drains. The condvar variant requires
    // this->mutex_ to be held around the wait.
    if (gUseFutexRWLock)
    {
        return this->LockWriteNSAbsUnlocked(uTimeout);
    }
    else
    {
        AU_LOCK_GUARD(this->mutex_);
        return this->LockWriteNSAbsUnlocked(uTimeout);
    }
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::LockWriteNSAbsSecondPath()
{
    // Single write-acquire attempt used by the recursive-lock fast path.
    auto uOld = this->state_;
    if (uOld < 0)
    {
        // Already writer-held: succeed only for the owning thread, deepening
        // the recursion (state_ becomes -2, -3, ...).
        if (this->reentrantWriteLockHandle_ == GetThreadCookie())
        {
            AuAtomicSub(&this->state_, 1);
            return true;
        }
    }
    else if (uOld == 0)
    {
        // Free: claim exclusivity (0 -> -1) and record the owner cookie.
        if (AuAtomicCompareExchange(&this->state_, -1, uOld) == uOld)
        {
            this->reentrantWriteLockHandle_ = GetThreadCookie();
            return true;
        }
    }
    // Readers present, lost the CAS, or held by another writer.
    return false;
}
/// Exclusive (write) acquire with a relative nanosecond timeout
/// (0 = wait forever). Converts to an absolute deadline and defers to
/// LockWriteNSAbsUnlocked for the blocking wait.
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::LockWriteNS(AuUInt64 uTimeout)
{
    if constexpr (!bIsWriteRecursionAllowed)
    {
        // Non-recursive locks: spin/yield fast path only.
        if (TryLockWrite())
        {
            return true;
        }
    }
    else
    {
        // Recursive locks: reuse the shared fast path (owner re-entry or
        // uncontended 0 -> -1 CAS) instead of duplicating its body here.
        if (this->LockWriteNSAbsSecondPath())
        {
            return true;
        }
    }
    // Slow path: compute the deadline and block. The condvar variant
    // requires this->mutex_ to be held around the wait.
    if (gUseFutexRWLock)
    {
        AuInt64 uEndTime = uTimeout ? AuTime::SteadyClockNS() + uTimeout : 0;
        return this->LockWriteNSAbsUnlocked(uEndTime);
    }
    else
    {
        AU_LOCK_GUARD(this->mutex_);
        AuInt64 uEndTime = uTimeout ? AuTime::SteadyClockNS() + uTimeout : 0;
        return this->LockWriteNSAbsUnlocked(uEndTime);
    }
}
// Blocking write acquire against an absolute nanosecond deadline
// (qwTimeoutNS == 0 waits forever). In condvar mode the caller must hold
// this->mutex_; in futex mode it is called unlocked.
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::LockWriteNSAbsUnlocked(AuUInt64 qwTimeoutNS)
{
    while (true)
    {
        AuInt32 iCurState;
        // Wait until the lock fully drains (no readers, no writer).
        while ((iCurState = this->state_) != 0)
        {
            AuInt64 uSecondTimeout = 0;
            bool bStatus {};
            if (gUseFutexRWLock)
            {
                // Futex path: wait on the writer wake-token counter.
                auto pSemaphore = this->GetFutexConditionWriter();
                // NOTE(review): intentionally shadows the outer iCurState;
                // the outer variable is only re-read after this loop exits.
                AuInt32 iCurState;
                while ((iCurState = this->state_) != 0)
                {
                    // bStatusTwo == true: we observed state_ == 0 without
                    // sleeping, so no wake token was consumed on our behalf.
                    bool bStatusTwo {};
                    // Advertise ourselves before re-checking state_, so an
                    // unlocking thread sees a pending writer and posts a
                    // token (see UnlockRead/UnlockWrite).
                    AuAtomicAdd(&this->writersPending_, 1);
                    static const AuUInt32 kExpect { 0 };
                    // Fence: the increment above must be visible before the
                    // state_ re-read below (MSVC only; see macro definition).
                    RWLOCK_REORDER_BARRIER();
                    if ((iCurState = this->state_) == 0)
                    {
                        bStatus = true;
                        bStatusTwo = true;
                    }
                    else
                    {
                        // Sleep while the token counter is still zero;
                        // false indicates deadline expiry.
                        bStatus = WaitOnAddress(pSemaphore, &kExpect, sizeof(kExpect), qwTimeoutNS);
                    }
                    AuAtomicSub(&this->writersPending_, 1);
                    if (!bStatus)
                    {
                        break;
                    }
                    if (!bStatusTwo)
                    {
                        // We were woken by a posted token: consume exactly
                        // one (CAS down, tolerating racing consumers).
                        while (true)
                        {
                            auto uState = *pSemaphore;
                            if (uState == 0)
                            {
                                break;
                            }
                            if (AuAtomicCompareExchange(pSemaphore, uState - 1, uState) == uState)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else
            {
                // Condvar path (mutex_ held by caller): compute remaining
                // time, then wait with writersPending_ raised so unlockers
                // prefer signalling the writer condvar.
                if (qwTimeoutNS)
                {
                    uSecondTimeout = qwTimeoutNS - AuTime::SteadyClockNS();
                    if (uSecondTimeout <= 0)
                    {
                        return false;
                    }
                }
                AuAtomicAdd(&this->writersPending_, 1);
#if defined(AURWLOCK_NO_SIZE_OPTIMIZED_CONDVAR)
                bStatus = this->GetConditionWriter().WaitForSignalNS(uSecondTimeout);
#else
                bStatus = this->GetConditionWriter().WaitForSignalNsEx(&this->mutex_, uSecondTimeout);
#endif
                AuAtomicSub(&this->writersPending_, 1);
            }
            if constexpr (bIsWriteRecursionAllowed)
            {
                // One reader left: rebroadcast so upgrade waiters (which
                // wait for state_ == 1 in UpgradeReadToWrite) can run.
                if (this->state_ == 1)
                {
                    this->SignalManyWriter();
                }
            }
            if (!bStatus)
            {
                return false;
            }
        }
        // Drained: claim exclusivity (0 -> -1); on a lost race, go back to
        // waiting.
        if (AuAtomicCompareExchange(&this->state_, -1, 0) == 0)
        {
            this->reentrantWriteLockHandle_ = GetThreadCookie();
            return true;
        }
    }
    return true;
}
template<bool bIsWriteRecursionAllowed>
void RWLockImpl<bIsWriteRecursionAllowed>::SignalOneReader()
{
    // Release a single blocked reader.
    if (!gUseFutexRWLock)
    {
        this->GetCondition().Signal();
        return;
    }
    // Futex mode: readers sleep on the state word itself.
    WakeOnAddress((const void *)&this->state_);
}
template<bool bIsWriteRecursionAllowed>
void RWLockImpl<bIsWriteRecursionAllowed>::SignalOneWriter()
{
    // Release a single blocked writer.
    if (!gUseFutexRWLock)
    {
        this->GetConditionWriter().Signal();
        return;
    }
    // Futex mode: post one wake token on the writer counter, then wake a
    // sleeper waiting on it.
    auto pSemaphore = this->GetFutexConditionWriter();
    AuAtomicAdd(pSemaphore, 1u);
    WakeOnAddress(pSemaphore);
}
template<bool bIsWriteRecursionAllowed>
void RWLockImpl<bIsWriteRecursionAllowed>::SignalManyReader()
{
    // Release every blocked reader.
    if (!gUseFutexRWLock)
    {
        this->GetCondition().Broadcast();
        return;
    }
    // Futex mode: readers sleep on the state word itself.
    WakeAllOnAddress((const void *)&this->state_);
}
template<bool bIsWriteRecursionAllowed>
void RWLockImpl<bIsWriteRecursionAllowed>::SignalManyWriter()
{
    // Release every currently-pending writer.
    if (!gUseFutexRWLock)
    {
        this->GetConditionWriter().Broadcast();
        return;
    }
    // Futex mode: post one wake token per pending writer, then wake that
    // many sleepers on the counter.
    auto pSemaphore = this->GetFutexConditionWriter();
    AuUInt32 uWaiters = this->writersPending_;
    AuAtomicAdd(pSemaphore, uWaiters);
    WakeNOnAddress(pSemaphore, uWaiters);
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::TryLockRead()
{
    // Non-blocking shared acquire; the runtime config may ask for a short
    // spin (DoTryIf) before reporting failure.
    if (!gRuntimeConfig.threadingConfig.bPreferRWLockReadLockSpin)
    {
        return TryLockReadNoSpin();
    }
    return DoTryIf([=]()
    {
        return TryLockReadNoSpin();
    });
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::TryLockReadNoSpin()
{
    // Single shared-acquire attempt, no retries.
    const auto iState = this->state_;
    if (iState < 0)
    {
        // Writer-held: succeed only for the owning thread (a read taken
        // inside a write hold; UnlockRead treats that case as a no-op).
        return this->reentrantWriteLockHandle_ == GetThreadCookie();
    }
    // Free or reader-held: try to bump the reader count by one.
    return AuAtomicCompareExchange(&this->state_, iState + 1, iState) == iState;
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::TryLockWrite()
{
    // Bounded write-acquire attempt: up to kRWThreadWriterHardContextSwitchBias
    // tries, yielding the thread's context between failed attempts.
    for (AU_ITERATE_N(i, kRWThreadWriterHardContextSwitchBias))
    {
        auto curVal = this->state_;
        if (curVal < 0)
        {
            if constexpr (!bIsWriteRecursionAllowed)
            {
                // Another writer holds it; yield and retry.
                AuThreading::ContextYield();
                continue;
            }
            else
            {
                // Recursive mode: the owning thread may deepen its hold.
                if (this->reentrantWriteLockHandle_ == GetThreadCookie())
                {
                    AuAtomicSub(&this->state_, 1);
                    return true;
                }
                else
                {
                    AuThreading::ContextYield();
                    continue;
                }
            }
        }
        // Readers present: retry without yielding.
        if (curVal != 0)
        {
            continue;
        }
        // Free: claim exclusivity (0 -> -1) and record the owner cookie.
        if (AuAtomicCompareExchange(&this->state_, -1, curVal) == curVal)
        {
            this->reentrantWriteLockHandle_ = GetThreadCookie();
            return true;
        }
    }
    // Spin budget exhausted.
    return false;
}
template<bool bIsWriteRecursionAllowed>
void RWLockImpl<bIsWriteRecursionAllowed>::UnlockRead()
{
    AuInt32 uVal {};
    // A read lock acquired while this thread held the write lock (see
    // TryLockReadNoSpin) never touched state_, so its release is a no-op.
    if (this->state_ < 0)
    {
        SysAssertDbg(this->reentrantWriteLockHandle_ == GetThreadCookie());
        return;
    }
    // Drop one reader. NOTE(review): the uVal == 0 test implies AuAtomicSub
    // returns the post-decrement value — confirm against its definition.
    uVal = AuAtomicSub(&this->state_, 1);
    bool bAlt {};
    if constexpr (bIsWriteRecursionAllowed)
    {
        // Recursive mode also wakes when one reader remains: state_ == 1 is
        // the state UpgradeReadToWrite waiters are parked on.
        bAlt = uVal == 1;
    }
    if (uVal == 0 || bAlt)
    {
        bool bElevation {};
        if (!gUseFutexRWLock)
        {
            AU_LOCK_GUARD(this->mutex_); /* actually locking this->state_, out of branch. required for the mutually exclusive correctness of the condition. this is a fence. */
            bElevation = this->writersPending_ > 0;
        }
        else
        {
            /* atomic read */
            bElevation = this->writersPending_ > 0;
        }
        // Writer preference: hand off to a pending writer before waking
        // any readers.
        if (bElevation)
        {
            this->SignalOneWriter();
        }
        else
        {
            this->SignalManyReader();
        }
    }
}
template<bool bIsWriteRecursionAllowed>
void RWLockImpl<bIsWriteRecursionAllowed>::UnlockWrite()
{
    bool bElevationPending {};
    if constexpr (!bIsWriteRecursionAllowed)
    {
        // Non-recursive: a single release fully frees the lock.
        this->reentrantWriteLockHandle_ = 0;
        if (!gUseFutexRWLock)
        {
            // Condvar mode: clear state_ under the mutex so the release is
            // ordered against waiters' re-checks.
            AU_LOCK_GUARD(this->mutex_);
#if defined(AURORA_COMPILER_MSVC)
            // NOTE(review): plain store on MSVC vs __sync_lock_release
            // elsewhere — presumably relies on MSVC volatile/store ordering
            // plus the surrounding mutex/barriers; confirm.
            this->state_ = 0;
#else
            __sync_lock_release(&this->state_);
#endif
            bElevationPending = this->writersPending_ > 0;
        }
        else
        {
            // Futex mode: sample pending writers, then publish the release.
            bElevationPending = this->writersPending_ > 0;
#if defined(AURORA_COMPILER_MSVC)
            this->state_ = 0;
#else
            __sync_lock_release(&this->state_);
#endif
        }
        // Writer preference: hand off to a pending writer before waking
        // any readers.
        if (bElevationPending)
        {
            this->SignalOneWriter();
        }
        else
        {
            this->SignalManyReader();
        }
    }
    else
    {
        // Recursive: step state_ up one level (-N -> -N+1); only the final
        // level (-1 -> 0) clears the owner cookie and wakes waiters.
        AuInt32 val {};
        // love me cas
        {
            AuInt32 curVal {};
            do
            {
                curVal = this->state_;
                if (curVal != -1)
                {
                    // Not the outermost hold: skip clearing the cookie.
                    // (continue in a do-while still evaluates the CAS in
                    // the loop condition below.)
                    continue;
                }
                this->reentrantWriteLockHandle_ = 0;
            }
            while (AuAtomicCompareExchange(&this->state_, val = (curVal + 1), curVal) != curVal);
        }
        if (val == 0)
        {
            if (!gUseFutexRWLock)
            {
                // Mutex taken as a fence around the pending-writers check
                // (same rationale as UnlockRead).
                AU_LOCK_GUARD(this->mutex_);
                bElevationPending = this->writersPending_ > 0;
            }
            else
            {
                bElevationPending = this->writersPending_ > 0;
            }
            if (bElevationPending)
            {
                this->SignalOneWriter();
            }
            else
            {
                this->SignalManyReader();
            }
        }
    }
}
// Upgrade a held read lock to the write lock. The caller must hold exactly
// one read reference; the upgrade succeeds once this thread is the sole
// reader (state_ == 1) and the 1 -> -1 swap wins. uTimeout is relative
// nanoseconds (0 = wait forever).
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::UpgradeReadToWrite(AuUInt64 uTimeout)
{
    // Fast path: already the sole reader.
    if (this->state_ == 1)
    {
        if (gUseFutexRWLock)
        {
            if (this->UpgradeReadToWriteDoUpgrade())
            {
                return true;
            }
        }
        else
        {
            AU_LOCK_GUARD(this->mutex_);
            if (this->UpgradeReadToWriteDoUpgrade())
            {
                return true;
            }
        }
    }
    auto uEndTime = uTimeout ? AuTime::SteadyClockNS() + uTimeout : 0;
    if (!gUseFutexRWLock)
    {
        // Condvar path: park on the writer condvar with writersPending_
        // raised so unlocking readers signal us (see UnlockRead).
        AU_LOCK_GUARD(this->mutex_);
        AuAtomicAdd(&this->writersPending_, 1);
        while (this->state_ != 1)
        {
            AuInt64 iSecondTimeout {};
            if (uTimeout)
            {
                iSecondTimeout = AuInt64(uEndTime) - AuTime::SteadyClockNS();
                if (iSecondTimeout <= 0)
                {
                    AuAtomicSub(&this->writersPending_, 1);
                    return false;
                }
            }
#if defined(AURWLOCK_NO_SIZE_OPTIMIZED_CONDVAR)
            if (!this->GetConditionWriter().WaitForSignalNS(iSecondTimeout))
#else
            if (!this->GetConditionWriter().WaitForSignalNsEx(&this->mutex_, iSecondTimeout))
#endif
            {
                AuAtomicSub(&this->writersPending_, 1);
                return false;
            }
        }
        AuAtomicSub(&this->writersPending_, 1);
        return this->UpgradeReadToWriteDoUpgrade();
    }
    else
    {
        // Futex path: same token-counter dance as LockWriteNSAbsUnlocked,
        // except the wake condition is state_ == 1 rather than 0.
        while (true)
        {
            auto pSemaphore = this->GetFutexConditionWriter();
            AuInt32 iCurState;
            while ((iCurState = this->state_) != 1)
            {
                // bStatusTwo == true: observed state_ == 1 without sleeping,
                // so no wake token was consumed on our behalf.
                bool bStatusTwo {};
                bool bStatus {};
                // Advertise before re-checking state_ (ordered by the
                // barrier below on MSVC) so unlockers post a token.
                AuAtomicAdd(&this->writersPending_, 1);
                static const AuUInt32 kExpect { 0 };
                RWLOCK_REORDER_BARRIER();
                if ((iCurState = this->state_) == 1)
                {
                    bStatus = true;
                    bStatusTwo = true;
                }
                else
                {
                    // Sleep while the token counter is zero; false means
                    // the deadline expired.
                    bStatus = WaitOnAddress(pSemaphore, &kExpect, sizeof(kExpect), uEndTime);
                }
                AuAtomicSub(&this->writersPending_, 1);
                if (!bStatus)
                {
                    return false;
                }
                if (!bStatusTwo)
                {
                    // Woken by a posted token: consume exactly one.
                    while (true)
                    {
                        auto uState = *pSemaphore;
                        if (uState == 0)
                        {
                            break;
                        }
                        if (AuAtomicCompareExchange(pSemaphore, uState - 1, uState) == uState)
                        {
                            break;
                        }
                    }
                }
            }
            // Sole reader observed: attempt the 1 -> -1 swap; on a lost
            // race, go back to waiting.
            if (this->UpgradeReadToWriteDoUpgrade())
            {
                return true;
            }
        }
    }
    /* unreachable */
    return false;
}
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::UpgradeReadToWriteDoUpgrade()
{
    // Swap the sole-reader state (1) for the exclusive state (-1); on
    // success the calling thread becomes the recorded write owner.
    if (AuAtomicCompareExchange(&this->state_, -1, 1) != 1)
    {
        return false;
    }
    this->reentrantWriteLockHandle_ = GetThreadCookie();
    return true;
}
/// Downgrade a non-recursed write hold (-1) into a single read hold (1),
/// waking all blocked readers. Returns false if state_ is not exactly -1
/// (e.g. a recursed hold deeper than one level).
template<bool bIsWriteRecursionAllowed>
bool RWLockImpl<bIsWriteRecursionAllowed>::DowngradeWriteToRead()
{
    if (gUseFutexRWLock)
    {
        if (AuAtomicCompareExchange(&this->state_, 1, -1) == -1)
        {
            this->SignalManyReader();
            return true;
        }
        // Fix: previously fell through into the mutex path below, taking
        // this->mutex_ (never otherwise used in futex mode) and retrying the
        // CAS — a retry that could even succeed against a *different*
        // thread's subsequent write hold. Fail immediately instead.
        return false;
    }
    // Condvar mode: perform the swap under the mutex used by the waiters.
    AU_LOCK_GUARD(this->mutex_);
    if (AuAtomicCompareExchange(&this->state_, 1, -1) == -1)
    {
        this->SignalManyReader();
        return true;
    }
    else
    {
        return false;
    }
}
template<bool bIsWriteRecursionAllowed>
IWaitable *RWLockImpl<bIsWriteRecursionAllowed>::AsReadable()
{
    // Expose the shared-acquire face of this lock as a generic waitable.
    return &read_;
}
template<bool bIsWriteRecursionAllowed>
IWaitable *RWLockImpl<bIsWriteRecursionAllowed>::AsWritable()
{
    // Expose the exclusive-acquire face of this lock as a generic waitable.
    return &write_;
}
AUKN_SYM IRWLock *RWLockNew()
{
    // Allocate a non-write-recursive RW lock; _new yields nullptr on
    // failure, which is forwarded to the caller unchanged.
    return _new RWLockImpl<false>();
}
// Destroy a lock previously obtained from RWLockNew.
AUKN_SYM void RWLockRelease(IRWLock *pRwLock)
{
    AuSafeDelete<RWLockImpl<false> *>(pRwLock);
}
AUKN_SYM IRWLock *RWRenterableLockNew()
{
    // Allocate a write-recursive (re-enterable) RW lock; _new yields
    // nullptr on failure, which is forwarded to the caller unchanged.
    return _new RWLockImpl<true>();
}
// Destroy a lock previously obtained from RWRenterableLockNew.
AUKN_SYM void RWRenterableLockRelease(IRWLock *pRwLock)
{
    AuSafeDelete<RWLockImpl<true> *>(pRwLock);
}
// Exported factory plumbing for the two lock flavors — presumably the
// AuROXTL small-object-optimization source macros; confirm expansion in
// the AuROXTL headers.
AUROXTL_INTERFACE_SOO_SRC_EX(AURORA_SYMBOL_EXPORT, RWRenterableLock, RWLockImpl<true>)
AUROXTL_INTERFACE_SOO_SRC_EX(AURORA_SYMBOL_EXPORT, RWLock, RWLockImpl<false>)
}