AuroraRuntime/Include/Aurora/Threading/Waitables/FutexWaitable.hpp
2024-05-28 19:28:08 +01:00

405 lines
10 KiB
C++

/***
Copyright (C) 2023 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: FutexWaitable.hpp
Date: 2023-08-19
Author: Reece
***/
#pragma once
#include "../WakeOnAddress.hpp"
#include "../SpinTime.hpp"
#include "_.inl"
namespace Aurora::Threading::Waitables
{
// 32-bit futex-style mutex implementing the polymorphic IWaitable interface.
// Lock word layout: bit 0 set => held, 0 => free. uAtomicSleeping counts the
// threads currently parked in WaitOnAddress so that Unlock can skip the wake
// call entirely when nobody is waiting.
struct FutexWaitable final : IWaitable
{
AURT_WAITABLE_NULL_CTORS_DEF(FutexWaitable)
// One-shot acquire: atomically test-and-set bit 0 of the state word.
// Returns true iff the bit was previously clear (caller now owns the lock).
// No spinning, no sleeping.
inline bool TryLockNoSpin()
{
return AuAtomicTestAndSet(&this->uAtomicState, 0u) == 0;
}
// Acquire with a bounded spin phase but no sleep: TryWaitOnAddressEx
// repeatedly invokes our TryLockNoSpin callback while it spins against the
// state word. Returns true on acquisition, false if the spin budget runs out.
inline bool TryLock() override
{
// Comparand: the value the state word holds while the lock is contended.
static const AuUInt32 kRef { 1 };
if (TryLockNoSpin())
{
return true;
}
return TryWaitOnAddressEx((const void *)&this->uAtomicState,
&kRef,
sizeof(kRef),
[&](const void *pTargetAddress,
const void *pCompareAddress,
AuUInt8 uWordSize)
{
return this->TryLockNoSpin();
});
}
// No native OS handle backs this primitive.
inline bool HasOSHandle(AuMach &mach) override
{
return false;
}
// This waitable carries its own Lock/Unlock implementation.
inline bool HasLockImplementation() override
{
return true;
}
// Release: clear the lock byte (AuAtomicClearU8Lock — presumably a
// release-ordered clear of the low byte; confirm against the atomics header),
// then issue a wake only if at least one thread registered in uAtomicSleeping.
inline void Unlock() override
{
AuAtomicClearU8Lock(&this->uAtomicState);
if (auto uSleeping = AuAtomicLoad(&this->uAtomicSleeping))
{
WakeOnAddress((const void *)&this->uAtomicState);
}
}
// Unbounded blocking acquire. After a failed spin (TryLock), park on the
// address while the word still equals kRef (1 == locked); a stale comparand
// makes WaitOnAddress return immediately, which closes the race between a
// failed test-and-set and the sleeper-count increment. Timeout 0 here
// appears to mean "wait forever" — consistent with WaitOnAddress.hpp usage.
inline void Lock() override
{
static const AuUInt32 kRef { 1 };
if (TryLock())
{
return;
}
while (!TryLockNoSpin())
{
AuAtomicAdd(&this->uAtomicSleeping, 1u);
WaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef), 0, true);
AuAtomicSub(&this->uAtomicSleeping, 1u);
}
}
// Relative-millisecond acquire; converts and forwards to LockNS.
inline bool LockMS(AuUInt64 qwTimeout) override
{
return LockNS(AuMSToNS<AuUInt64>(qwTimeout));
}
// Absolute-millisecond acquire; converts and forwards to LockAbsNS.
inline bool LockAbsMS(AuUInt64 qwTimeout) override
{
return LockAbsNS(AuMSToNS<AuUInt64>(qwTimeout));
}
// Acquire with a relative nanosecond timeout. The steady-clock deadline is
// pinned *before* the spin phase so spinning eats into the caller's budget.
// Returns false only when the deadline elapsed AND a final opportunistic
// TryLockNoSpin failed — the retry resolves an unlock racing the timeout.
inline bool LockNS(AuUInt64 qwTimeout) override
{
static const AuUInt32 kRef { 1 };
if (TryLockNoSpin())
{
return true;
}
auto qwEndTime = Time::SteadyClockNS() + qwTimeout;
if (TryLock())
{
return true;
}
while (!TryLockNoSpin())
{
bool bStatus {};
AuAtomicAdd(&this->uAtomicSleeping, 1u);
bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwEndTime, true);
AuAtomicSub(&this->uAtomicSleeping, 1u);
if (!bStatus)
{
// Wait timed out: one last acquire attempt decides the result.
return TryLockNoSpin();
}
}
return true;
}
// Acquire with an absolute steady-clock deadline in nanoseconds. Same
// sleeper-count / timeout protocol as LockNS, minus the relative-to-absolute
// conversion (and thus no pre-deadline fast path before TryLock).
inline bool LockAbsNS(AuUInt64 qwTimeoutAbs) override
{
static const AuUInt32 kRef { 1 };
if (TryLock())
{
return true;
}
while (!TryLockNoSpin())
{
bool bStatus {};
AuAtomicAdd(&this->uAtomicSleeping, 1u);
bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwTimeoutAbs, true);
AuAtomicSub(&this->uAtomicSleeping, 1u);
if (!bStatus)
{
// Deadline passed: final acquire attempt decides the result.
return TryLockNoSpin();
}
}
return true;
}
AuAUInt32 uAtomicState {};    // lock word: 0 = free, 1 = held
AuAUInt32 uAtomicSleeping {}; // threads currently parked in WaitOnAddress
};
// Non-virtual twin of FutexWaitable: identical lock word + sleeper-counter
// protocol, but with no IWaitable base, so the type has no vtable pointer
// (8 bytes total of state) and is freely movable per the NULL_CTORS macro.
struct FutexWaitableNoVTblMovable final
{
AURT_WAITABLE_NULL_CTORS_DEF(FutexWaitableNoVTblMovable)
// One-shot acquire: test-and-set bit 0; true iff previously clear.
inline bool TryLockNoSpin()
{
return AuAtomicTestAndSet(&this->uAtomicState, 0u) == 0;
}
// Spin-bounded acquire (no sleep); the callback re-attempts the
// test-and-set while TryWaitOnAddressEx spins on the state word.
inline bool TryLock()
{
// Comparand: value of the state word while locked.
static const AuUInt32 kRef { 1 };
if (TryLockNoSpin())
{
return true;
}
return TryWaitOnAddressEx((const void *)&this->uAtomicState,
&kRef,
sizeof(kRef),
[&](const void *pTargetAddress,
const void *pCompareAddress,
AuUInt8 uWordSize)
{
return this->TryLockNoSpin();
});
}
// Release the lock byte, then wake only if a sleeper is registered.
inline void Unlock()
{
AuAtomicClearU8Lock(&this->uAtomicState);
if (auto uSleeping = AuAtomicLoad(&this->uAtomicSleeping))
{
WakeOnAddress((const void *)&this->uAtomicState);
}
}
// Unbounded blocking acquire: park while the word still equals 1 (locked).
// The comparand check in WaitOnAddress closes the window between a failed
// test-and-set and the sleeper-count increment.
inline void Lock()
{
static const AuUInt32 kRef { 1 };
if (TryLock())
{
return;
}
while (!TryLockNoSpin())
{
AuAtomicAdd(&this->uAtomicSleeping, 1u);
WaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef), 0, true);
AuAtomicSub(&this->uAtomicSleeping, 1u);
}
}
// Relative-millisecond acquire; forwards to LockNS.
inline bool LockMS(AuUInt64 qwTimeout)
{
return LockNS(AuMSToNS<AuUInt64>(qwTimeout));
}
// Absolute-millisecond acquire; forwards to LockAbsNS.
inline bool LockAbsMS(AuUInt64 qwTimeout)
{
return LockAbsNS(AuMSToNS<AuUInt64>(qwTimeout));
}
// Relative nanosecond timeout: deadline is pinned on the steady clock
// before the spin phase. On wait timeout, a final TryLockNoSpin decides
// the result (handles an unlock racing the timeout).
inline bool LockNS(AuUInt64 qwTimeout)
{
static const AuUInt32 kRef { 1 };
if (TryLockNoSpin())
{
return true;
}
auto qwEndTime = Time::SteadyClockNS() + qwTimeout;
if (TryLock())
{
return true;
}
while (!TryLockNoSpin())
{
bool bStatus {};
AuAtomicAdd(&this->uAtomicSleeping, 1u);
bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwEndTime, true);
AuAtomicSub(&this->uAtomicSleeping, 1u);
if (!bStatus)
{
// Timed out: last-chance acquire attempt.
return TryLockNoSpin();
}
}
return true;
}
// Absolute steady-clock deadline in nanoseconds; same protocol as LockNS.
inline bool LockAbsNS(AuUInt64 qwTimeoutAbs)
{
static const AuUInt32 kRef { 1 };
if (TryLock())
{
return true;
}
while (!TryLockNoSpin())
{
bool bStatus {};
AuAtomicAdd(&this->uAtomicSleeping, 1u);
bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwTimeoutAbs, true);
AuAtomicSub(&this->uAtomicSleeping, 1u);
if (!bStatus)
{
// Deadline passed: last-chance acquire attempt.
return TryLockNoSpin();
}
}
return true;
}
AuAUInt32 uAtomicState {};    // lock word: 0 = free, 1 = held
AuAUInt32 uAtomicSleeping {}; // threads currently parked in WaitOnAddress
};
// Minimum-footprint variant: a single 32-bit lock word and nothing else.
// Trade-off vs the other variants in this file: with no uAtomicSleeping
// counter, Unlock must issue WakeOnAddress unconditionally — it cannot tell
// whether anyone is parked. Pick this when size matters more than the cost
// of uncontended wakes.
struct FutexWaitableNoVTblMovableSmallest final
{
AURT_WAITABLE_NULL_CTORS_DEF(FutexWaitableNoVTblMovableSmallest)
// One-shot acquire: test-and-set bit 0; true iff previously clear.
inline bool TryLockNoSpin()
{
return AuAtomicTestAndSet(&this->uAtomicState, 0u) == 0;
}
// Spin-bounded acquire (no sleep); the callback re-attempts the
// test-and-set while TryWaitOnAddressEx spins on the state word.
inline bool TryLock()
{
// Comparand: value of the state word while locked.
static const AuUInt32 kRef { 1 };
if (TryLockNoSpin())
{
return true;
}
return TryWaitOnAddressEx((const void *)&this->uAtomicState,
&kRef,
sizeof(kRef),
[&](const void *pTargetAddress,
const void *pCompareAddress,
AuUInt8 uWordSize)
{
return this->TryLockNoSpin();
});
}
// Release, then wake unconditionally — no sleeper counter to consult.
inline void Unlock()
{
AuAtomicClearU8Lock(&this->uAtomicState);
WakeOnAddress((const void *)&this->uAtomicState);
}
// Unbounded blocking acquire: park while the word still equals 1 (locked).
// The comparand check prevents sleeping past a concurrent unlock.
inline void Lock()
{
static const AuUInt32 kRef { 1 };
if (TryLock())
{
return;
}
while (!TryLockNoSpin())
{
WaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef), 0, true);
}
}
// Relative-millisecond acquire; forwards to LockNS.
inline bool LockMS(AuUInt64 qwTimeout)
{
return LockNS(AuMSToNS<AuUInt64>(qwTimeout));
}
// Absolute-millisecond acquire; forwards to LockAbsNS.
inline bool LockAbsMS(AuUInt64 qwTimeout)
{
return LockAbsNS(AuMSToNS<AuUInt64>(qwTimeout));
}
// Relative nanosecond timeout: deadline pinned on the steady clock before
// the spin phase. On wait timeout, a final TryLockNoSpin decides the result.
inline bool LockNS(AuUInt64 qwTimeout)
{
static const AuUInt32 kRef { 1 };
if (TryLockNoSpin())
{
return true;
}
auto qwEndTime = Time::SteadyClockNS() + qwTimeout;
if (TryLock())
{
return true;
}
while (!TryLockNoSpin())
{
bool bStatus {};
bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwEndTime, true);
if (!bStatus)
{
// Timed out: last-chance acquire attempt.
return TryLockNoSpin();
}
}
return true;
}
// Absolute steady-clock deadline in nanoseconds; same protocol as LockNS.
inline bool LockAbsNS(AuUInt64 qwTimeoutAbs)
{
static const AuUInt32 kRef { 1 };
if (TryLock())
{
return true;
}
while (!TryLockNoSpin())
{
bool bStatus {};
bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwTimeoutAbs, true);
if (!bStatus)
{
// Deadline passed: last-chance acquire attempt.
return TryLockNoSpin();
}
}
return true;
}
// Note: a reference U16/U8 reference impl would be pointless - threefold
// 1) x86_64 chokes on 16bit atomics;
// 2) 32bit RISC ISAs would prefer 32bit words;
// 3) sub 32bit words are just going to junk up our alignment & introduce pointless padding
AuAUInt32 uAtomicState {}; // lock word: 0 = free, 1 = held
};
}