2023-08-19 18:54:56 +00:00
|
|
|
/***
|
|
|
|
Copyright (C) 2023 J Reece Wilson (a/k/a "Reece"). All rights reserved.
|
|
|
|
|
|
|
|
File: FutexWaitable.hpp
|
|
|
|
Date: 2023-08-19
|
|
|
|
Author: Reece
|
|
|
|
***/
|
|
|
|
#pragma once
|
|
|
|
|
|
|
|
#include "../WakeOnAddress.hpp"
|
|
|
|
#include "../SpinTime.hpp"
|
2024-05-28 18:28:08 +00:00
|
|
|
#include "_.inl"
|
2023-08-19 18:54:56 +00:00
|
|
|
|
|
|
|
namespace Aurora::Threading::Waitables
|
|
|
|
{
|
|
|
|
// A futex-backed mutual-exclusion waitable implementing the IWaitable interface.
// State layout: bit 0 of uAtomicState is the lock bit; uAtomicSleeping counts
// threads currently parked (or about to park) in WaitOnAddress, so Unlock can
// skip the wake syscall on the uncontended path.
struct FutexWaitable final : IWaitable
{
    AURT_WAITABLE_NULL_CTORS_DEF(FutexWaitable)

    // Single attempt to take the lock: atomically test-and-set bit 0.
    // Returns true iff the bit was previously clear (we now own the lock).
    inline bool TryLockNoSpin()
    {
        return AuAtomicTestAndSet(&this->uAtomicState, 0u) == 0;
    }

    // TryLock with a bounded spin: one immediate attempt, then defer to the
    // WakeOnAddress spin helper, which re-invokes TryLockNoSpin while the
    // word still compares equal to kRef (i.e. "locked, no other bits set").
    inline bool TryLock() override
    {
        static const AuUInt32 kRef { 1 };

        if (TryLockNoSpin())
        {
            return true;
        }

        return TryWaitOnAddressEx((const void *)&this->uAtomicState,
                                  &kRef,
                                  sizeof(kRef),
                                  [&](const void *pTargetAddress,
                                      const void *pCompareAddress,
                                      AuUInt8 uWordSize)
        {
            return this->TryLockNoSpin();
        });
    }

    // No native OS handle backs this primitive; callers must use the Lock* API.
    inline bool HasOSHandle(AuMach &mach) override
    {
        return false;
    }

    // This waitable provides its own Lock()/Unlock() implementation.
    inline bool HasLockImplementation() override
    {
        return true;
    }

    inline void Unlock() override
    {
        // Release the lock by clearing the low byte of the state word
        // (presumably with release ordering — see AuAtomicClearU8Lock).
        AuAtomicClearU8Lock(&this->uAtomicState);

        // Only pay for the wake syscall if at least one thread has registered
        // itself as sleeping. The counter is incremented before the waiter
        // parks, so a waiter cannot be missed here.
        if (auto uSleeping = AuAtomicLoad(&this->uAtomicSleeping))
        {
            WakeOnAddress((const void *)&this->uAtomicState);
        }
    }

    inline void Lock() override
    {
        static const AuUInt32 kRef { 1 };

        // Fast path: uncontended acquire (includes the TryLock spin phase).
        if (TryLock())
        {
            return;
        }

        // Slow path: park on the state word until the holder wakes us,
        // re-attempting the acquire after every wake.
        while (!TryLockNoSpin())
        {
            // Advertise ourselves as a sleeper *before* the final state check
            // inside WaitOnAddress, so Unlock cannot skip our wake.
            AuAtomicAdd(&this->uAtomicSleeping, 1u);
            WaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef), 0, true);
            AuAtomicSub(&this->uAtomicSleeping, 1u);
        }
    }

    // Relative-millisecond variant; delegates to the nanosecond entry point.
    inline bool LockMS(AuUInt64 qwTimeout) override
    {
        return LockNS(AuMSToNS<AuUInt64>(qwTimeout));
    }

    // Absolute-millisecond variant; delegates to the nanosecond entry point.
    inline bool LockAbsMS(AuUInt64 qwTimeout) override
    {
        return LockAbsNS(AuMSToNS<AuUInt64>(qwTimeout));
    }

    // Acquire with a relative timeout in nanoseconds.
    // Returns true on acquisition, false on timeout.
    inline bool LockNS(AuUInt64 qwTimeout) override
    {
        static const AuUInt32 kRef { 1 };

        // Cheap attempt before touching the clock.
        if (TryLockNoSpin())
        {
            return true;
        }

        // Convert to an absolute steady-clock deadline up front so spins and
        // waits below do not extend the total timeout.
        auto qwEndTime = Time::SteadyClockNS() + qwTimeout;

        // One spin-assisted attempt before committing to kernel waits.
        if (TryLock())
        {
            return true;
        }

        while (!TryLockNoSpin())
        {
            bool bStatus {};

            AuAtomicAdd(&this->uAtomicSleeping, 1u);
            bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwEndTime, true);
            AuAtomicSub(&this->uAtomicSleeping, 1u);

            if (!bStatus)
            {
                // Deadline elapsed: one last grab attempt decides the result.
                return TryLockNoSpin();
            }
        }

        return true;
    }

    // Acquire with an absolute steady-clock deadline in nanoseconds.
    // Returns true on acquisition, false on timeout.
    inline bool LockAbsNS(AuUInt64 qwTimeoutAbs) override
    {
        static const AuUInt32 kRef { 1 };

        if (TryLock())
        {
            return true;
        }

        while (!TryLockNoSpin())
        {
            bool bStatus {};

            AuAtomicAdd(&this->uAtomicSleeping, 1u);
            bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwTimeoutAbs, true);
            AuAtomicSub(&this->uAtomicSleeping, 1u);

            if (!bStatus)
            {
                return TryLockNoSpin();
            }
        }

        return true;
    }

    // Bit 0 = lock held; futexed word.
    AuAUInt32 uAtomicState {};
    // Count of threads parked (or about to park) on uAtomicState.
    AuAUInt32 uAtomicSleeping {};
};
|
2024-04-16 03:21:42 +00:00
|
|
|
|
|
|
|
// Same futex lock protocol as FutexWaitable, but without the IWaitable vtable:
// all methods are non-virtual, keeping the object trivially movable and free
// of a vtable pointer. Layout: bit 0 of uAtomicState is the lock bit;
// uAtomicSleeping counts parked waiters so Unlock can elide the wake syscall.
struct FutexWaitableNoVTblMovable final
{
    AURT_WAITABLE_NULL_CTORS_DEF(FutexWaitableNoVTblMovable)

    // Single attempt to take the lock: atomically test-and-set bit 0.
    // Returns true iff the bit was previously clear (we now own the lock).
    inline bool TryLockNoSpin()
    {
        return AuAtomicTestAndSet(&this->uAtomicState, 0u) == 0;
    }

    // TryLock with a bounded spin via the WakeOnAddress spin helper.
    inline bool TryLock()
    {
        static const AuUInt32 kRef { 1 };

        if (TryLockNoSpin())
        {
            return true;
        }

        return TryWaitOnAddressEx((const void *)&this->uAtomicState,
                                  &kRef,
                                  sizeof(kRef),
                                  [&](const void *pTargetAddress,
                                      const void *pCompareAddress,
                                      AuUInt8 uWordSize)
        {
            return this->TryLockNoSpin();
        });
    }

    inline void Unlock()
    {
        // Release the lock by clearing the low byte of the state word
        // (presumably with release ordering — see AuAtomicClearU8Lock).
        AuAtomicClearU8Lock(&this->uAtomicState);

        // Wake only when a waiter has registered itself; the counter is
        // incremented before the waiter parks, so no waiter is missed.
        if (auto uSleeping = AuAtomicLoad(&this->uAtomicSleeping))
        {
            WakeOnAddress((const void *)&this->uAtomicState);
        }
    }

    inline void Lock()
    {
        static const AuUInt32 kRef { 1 };

        // Fast path: uncontended acquire (includes the TryLock spin phase).
        if (TryLock())
        {
            return;
        }

        // Slow path: park on the state word, re-attempting after each wake.
        while (!TryLockNoSpin())
        {
            AuAtomicAdd(&this->uAtomicSleeping, 1u);
            WaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef), 0, true);
            AuAtomicSub(&this->uAtomicSleeping, 1u);
        }
    }

    // Relative-millisecond variant; delegates to the nanosecond entry point.
    inline bool LockMS(AuUInt64 qwTimeout)
    {
        return LockNS(AuMSToNS<AuUInt64>(qwTimeout));
    }

    // Absolute-millisecond variant; delegates to the nanosecond entry point.
    inline bool LockAbsMS(AuUInt64 qwTimeout)
    {
        return LockAbsNS(AuMSToNS<AuUInt64>(qwTimeout));
    }

    // Acquire with a relative timeout in nanoseconds.
    // Returns true on acquisition, false on timeout.
    inline bool LockNS(AuUInt64 qwTimeout)
    {
        static const AuUInt32 kRef { 1 };

        // Cheap attempt before touching the clock.
        if (TryLockNoSpin())
        {
            return true;
        }

        // Fix the deadline up front so spinning does not extend the timeout.
        auto qwEndTime = Time::SteadyClockNS() + qwTimeout;

        if (TryLock())
        {
            return true;
        }

        while (!TryLockNoSpin())
        {
            bool bStatus {};

            AuAtomicAdd(&this->uAtomicSleeping, 1u);
            bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwEndTime, true);
            AuAtomicSub(&this->uAtomicSleeping, 1u);

            if (!bStatus)
            {
                // Deadline elapsed: one last grab attempt decides the result.
                return TryLockNoSpin();
            }
        }

        return true;
    }

    // Acquire with an absolute steady-clock deadline in nanoseconds.
    // Returns true on acquisition, false on timeout.
    inline bool LockAbsNS(AuUInt64 qwTimeoutAbs)
    {
        static const AuUInt32 kRef { 1 };

        if (TryLock())
        {
            return true;
        }

        while (!TryLockNoSpin())
        {
            bool bStatus {};

            AuAtomicAdd(&this->uAtomicSleeping, 1u);
            bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwTimeoutAbs, true);
            AuAtomicSub(&this->uAtomicSleeping, 1u);

            if (!bStatus)
            {
                return TryLockNoSpin();
            }
        }

        return true;
    }

    // Bit 0 = lock held; futexed word.
    AuAUInt32 uAtomicState {};
    // Count of threads parked (or about to park) on uAtomicState.
    AuAUInt32 uAtomicSleeping {};
};
|
|
|
|
|
|
|
|
// Minimal-footprint variant: a single 32-bit word, no vtable and no sleeper
// counter. Trade-off: Unlock must unconditionally issue a WakeOnAddress
// (it cannot tell whether anyone is parked), so uncontended unlocks pay for
// the wake call in exchange for the smaller object size.
struct FutexWaitableNoVTblMovableSmallest final
{
    AURT_WAITABLE_NULL_CTORS_DEF(FutexWaitableNoVTblMovableSmallest)

    // Single attempt to take the lock: atomically test-and-set bit 0.
    // Returns true iff the bit was previously clear (we now own the lock).
    inline bool TryLockNoSpin()
    {
        return AuAtomicTestAndSet(&this->uAtomicState, 0u) == 0;
    }

    // TryLock with a bounded spin via the WakeOnAddress spin helper.
    inline bool TryLock()
    {
        static const AuUInt32 kRef { 1 };

        if (TryLockNoSpin())
        {
            return true;
        }

        return TryWaitOnAddressEx((const void *)&this->uAtomicState,
                                  &kRef,
                                  sizeof(kRef),
                                  [&](const void *pTargetAddress,
                                      const void *pCompareAddress,
                                      AuUInt8 uWordSize)
        {
            return this->TryLockNoSpin();
        });
    }

    inline void Unlock()
    {
        // Release the lock by clearing the low byte of the state word
        // (presumably with release ordering — see AuAtomicClearU8Lock).
        AuAtomicClearU8Lock(&this->uAtomicState);

        // No sleeper counter in this variant: always wake.
        WakeOnAddress((const void *)&this->uAtomicState);
    }

    inline void Lock()
    {
        static const AuUInt32 kRef { 1 };

        // Fast path: uncontended acquire (includes the TryLock spin phase).
        if (TryLock())
        {
            return;
        }

        // Slow path: park on the state word, re-attempting after each wake.
        while (!TryLockNoSpin())
        {
            WaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef), 0, true);
        }
    }

    // Relative-millisecond variant; delegates to the nanosecond entry point.
    inline bool LockMS(AuUInt64 qwTimeout)
    {
        return LockNS(AuMSToNS<AuUInt64>(qwTimeout));
    }

    // Absolute-millisecond variant; delegates to the nanosecond entry point.
    inline bool LockAbsMS(AuUInt64 qwTimeout)
    {
        return LockAbsNS(AuMSToNS<AuUInt64>(qwTimeout));
    }

    // Acquire with a relative timeout in nanoseconds.
    // Returns true on acquisition, false on timeout.
    inline bool LockNS(AuUInt64 qwTimeout)
    {
        static const AuUInt32 kRef { 1 };

        // Cheap attempt before touching the clock.
        if (TryLockNoSpin())
        {
            return true;
        }

        // Fix the deadline up front so spinning does not extend the timeout.
        auto qwEndTime = Time::SteadyClockNS() + qwTimeout;

        if (TryLock())
        {
            return true;
        }

        while (!TryLockNoSpin())
        {
            bool bStatus {};

            bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwEndTime, true);

            if (!bStatus)
            {
                // Deadline elapsed: one last grab attempt decides the result.
                return TryLockNoSpin();
            }
        }

        return true;
    }

    // Acquire with an absolute steady-clock deadline in nanoseconds.
    // Returns true on acquisition, false on timeout.
    inline bool LockAbsNS(AuUInt64 qwTimeoutAbs)
    {
        static const AuUInt32 kRef { 1 };

        if (TryLock())
        {
            return true;
        }

        while (!TryLockNoSpin())
        {
            bool bStatus {};

            bStatus = WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), qwTimeoutAbs, true);

            if (!bStatus)
            {
                return TryLockNoSpin();
            }
        }

        return true;
    }

    // Note: a reference U16/U8 reference impl would be pointless - threefold
    // 1) x86_64 chokes on 16bit atomics;
    // 2) 32bit RISC ISAs would prefer 32bit words;
    // 3) sub 32bit words are just going to junk up our alignment & introduce pointless padding
    AuAUInt32 uAtomicState {};
};
|
2023-08-19 18:54:56 +00:00
|
|
|
}
|