diff --git a/Include/Aurora/Runtime.hpp b/Include/Aurora/Runtime.hpp
index 07417c3c..4d5d992e 100644
--- a/Include/Aurora/Runtime.hpp
+++ b/Include/Aurora/Runtime.hpp
@@ -381,7 +381,7 @@ namespace Aurora
         AuUInt64 bPreferEmulatedWakeOnAddress : 1 { !AuBuild::kIsNtDerived /*everybody else requires us to hit the kernel. */ };
 #endif
         AuUInt64 bPreferWaitOnAddressAlwaysSpin : 1 { false }; // ..., if emulated! if double-spinning under higher level locks, disable me.
-        AuUInt64 bPreferWaitOnAddressAlwaysSpinNative : 1 { false }; // ..., if not emulated! noting that most kernels and user-schedulers will spin for you
+        AuUInt64 bPreferWaitOnAddressAlwaysSpinNative : 1 { !AuBuild::kIsNtDerived }; // ..., if not emulated! noting that most kernels and user-schedulers will spin for you
         AuUInt64 bPreferRWLockReadLockSpin : 1 { true };
         AuUInt64 bUWPNanosecondEmulationCheckFirst : 1 { false };
         AuUInt64 uUWPNanosecondEmulationMaxYields : 7 { 12 };
diff --git a/Include/Aurora/Threading/Waitables/FutexBarrier.hpp b/Include/Aurora/Threading/Waitables/FutexBarrier.hpp
index 3cad3980..1d728ed7 100644
--- a/Include/Aurora/Threading/Waitables/FutexBarrier.hpp
+++ b/Include/Aurora/Threading/Waitables/FutexBarrier.hpp
@@ -83,15 +83,21 @@ namespace Aurora::Threading::Waitables
                 }
             }
 
-            if (this->TryChkNoSpin())
-            {
-                return true;
-            }
-
-            (void)TryWaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef));
-
             while (!this->TryChkNoSpin())
             {
+                if (TryWaitOnAddressEx((const void *)&this->uAtomicState,
+                                       &kRef,
+                                       sizeof(kRef),
+                                       [&](const void *pTargetAddress,
+                                           const void *pCompareAddress,
+                                           AuUInt8 uWordSize)
+                {
+                    return this->TryChkNoSpin();
+                }))
+                {
+                    return true;
+                }
+
                 if (!WaitOnAddressSteady((const void *)&this->uAtomicState, &kRef, sizeof(kRef), uTimeoutAbsNS))
                 {
                     if (this->TryChkNoSpin())
diff --git a/Include/Aurora/Threading/Waitables/FutexCondWaitable.hpp b/Include/Aurora/Threading/Waitables/FutexCondWaitable.hpp
index 4612bac9..1f230d5c 100644
--- a/Include/Aurora/Threading/Waitables/FutexCondWaitable.hpp
+++ b/Include/Aurora/Threading/Waitables/FutexCondWaitable.hpp
@@ -179,19 +179,22 @@ namespace Aurora::Threading::Waitables
 
         auline bool TryLock2()
         {
+            static const AuUInt32 kRef { 0 };
+
             if (TryLock3())
             {
                 return true;
             }
 
-            static const AuUInt32 kRef { 0 };
-
-            if (TryWaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef)))
+            return TryWaitOnAddressEx((const void *)&this->uAtomicState,
+                                      &kRef,
+                                      sizeof(kRef),
+                                      [&](const void *pTargetAddress,
+                                          const void *pCompareAddress,
+                                          AuUInt8 uWordSize)
             {
-                return TryLock3();
-            }
-
-            return false;
+                return this->TryLock3();
+            });
         }
 
         auline bool SleepOne(AuUInt64 qwTimeout)
diff --git a/Include/Aurora/Threading/Waitables/FutexSemaphoreWaitable.hpp b/Include/Aurora/Threading/Waitables/FutexSemaphoreWaitable.hpp
index 7098e828..95247007 100644
--- a/Include/Aurora/Threading/Waitables/FutexSemaphoreWaitable.hpp
+++ b/Include/Aurora/Threading/Waitables/FutexSemaphoreWaitable.hpp
@@ -27,18 +27,22 @@ namespace Aurora::Threading::Waitables
 
         inline bool TryLock() override
         {
+            static const AuUInt32 kRef { 0 };
+
             if (TryLockNoSpin())
             {
                 return true;
             }
 
-            static const AuUInt32 kRef { 0 };
-            if (TryWaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef)))
+            return TryWaitOnAddressEx((const void *)&this->uAtomicState,
+                                      &kRef,
+                                      sizeof(kRef),
+                                      [&](const void *pTargetAddress,
+                                          const void *pCompareAddress,
+                                          AuUInt8 uWordSize)
             {
-                return TryLockNoSpin();
-            }
-
-            return false;
+                return this->TryLockNoSpin();
+            });
         }
 
         inline bool HasOSHandle(AuMach &mach) override
diff --git a/Include/Aurora/Threading/Waitables/FutexWaitable.hpp b/Include/Aurora/Threading/Waitables/FutexWaitable.hpp
index df736e14..dc114cd8 100644
--- a/Include/Aurora/Threading/Waitables/FutexWaitable.hpp
+++ b/Include/Aurora/Threading/Waitables/FutexWaitable.hpp
@@ -26,21 +26,22 @@ namespace Aurora::Threading::Waitables
 
         inline bool TryLock() override
         {
+            static const AuUInt32 kRef { 1 };
+
             if (TryLockNoSpin())
             {
                 return true;
             }
 
-            static const AuUInt32 kRef { 1 };
-            if (TryWaitOnAddress((const void *)&this->uAtomicState, &kRef, sizeof(kRef)))
-            {
-                if (TryLockNoSpin())
-                {
-                    return true;
-                }
-            }
-
-            return false;
+            return TryWaitOnAddressEx((const void *)&this->uAtomicState,
+                                      &kRef,
+                                      sizeof(kRef),
+                                      [&](const void *pTargetAddress,
+                                          const void *pCompareAddress,
+                                          AuUInt8 uWordSize)
+            {
+                return this->TryLockNoSpin();
+            });
         }
 
         inline bool HasOSHandle(AuMach &mach) override
diff --git a/Include/Aurora/Threading/WakeOnAddress.hpp b/Include/Aurora/Threading/WakeOnAddress.hpp
index bc6ebc40..5e8d7a21 100644
--- a/Include/Aurora/Threading/WakeOnAddress.hpp
+++ b/Include/Aurora/Threading/WakeOnAddress.hpp
@@ -39,6 +39,11 @@ namespace Aurora::Threading
                                    const void *pCompareAddress,
                                    AuUInt8 uWordSize);
 
+    AUKN_SYM bool TryWaitOnAddressEx(const void *pTargetAddress,
+                                     const void *pCompareAddress,
+                                     AuUInt8 uWordSize,
+                                     const AuFunction<bool(const void *, const void *, AuUInt8)> &check);
+
     // Relative timeout variant of nanosecond resolution WoA. 0 = indefinite
     AUKN_SYM bool WaitOnAddress(const void *pTargetAddress,
                                 const void *pCompareAddress,
diff --git a/Source/Threading/AuWakeOnAddress.cpp b/Source/Threading/AuWakeOnAddress.cpp
index 85e0fe5b..739b4c99 100644
--- a/Source/Threading/AuWakeOnAddress.cpp
+++ b/Source/Threading/AuWakeOnAddress.cpp
@@ -903,10 +903,30 @@ namespace Aurora::Threading
                                    const void *pCompareAddress,
                                    AuUInt8 uWordSize)
     {
-        auto expect = WaitBuffer::From(pCompareAddress, uWordSize);
         return Primitives::DoTryIf([&]()
         {
-            return !expect.Compare(pTargetAddress);
+            return !WaitBuffer::Compare(pCompareAddress, uWordSize, pTargetAddress);
+        });
+    }
+
+    AUKN_SYM bool TryWaitOnAddressEx(const void *pTargetAddress,
+                                     const void *pCompareAddress,
+                                     AuUInt8 uWordSize,
+                                     const AuFunction<bool(const void *, const void *, AuUInt8)> &check)
+    {
+        if (!check)
+        {
+            return TryWaitOnAddress(pTargetAddress, pCompareAddress, uWordSize);
+        }
+
+        return Primitives::DoTryIf([&]()
+        {
+            if (WaitBuffer::Compare(pCompareAddress, uWordSize, pTargetAddress))
+            {
+                return false;
+            }
+
+            return check(pTargetAddress, pCompareAddress, uWordSize);
         });
     }
 
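Usage note (illustrative, not part of the patch): the sketch below shows how a caller might drive the new TryWaitOnAddressEx entry point, modelled on the FutexWaitable::TryLock change above. Only TryWaitOnAddressEx itself comes from this diff; uWord, kLockedValue, TryAcquire, and TryAcquireWithSpinHint are hypothetical names invented for the example, and the std::atomic word stands in for whatever futex-style state word the caller already owns. Passing an empty AuFunction falls back to plain TryWaitOnAddress, so callers without a predicate keep the old behaviour.

#include <Aurora/Threading/WakeOnAddress.hpp>   // declares TryWaitOnAddressEx (added by this diff)
#include <atomic>

static std::atomic<AuUInt32> uWord { 0 };        // hypothetical word: 0 = free, 1 = held

static bool TryAcquire()                         // hypothetical CAS-style acquire of the word
{
    AuUInt32 uExpected { 0 };
    return uWord.compare_exchange_strong(uExpected, 1u);
}

static bool TryAcquireWithSpinHint()
{
    static const AuUInt32 kLockedValue { 1 };

    if (TryAcquire())
    {
        return true;
    }

    // Spin under the runtime's spin policy while the word still reads "held";
    // once it no longer matches kLockedValue, the callback re-attempts the
    // acquire. Returns false if the spin budget is exhausted without success.
    return Aurora::Threading::TryWaitOnAddressEx(&uWord,
                                                 &kLockedValue,
                                                 sizeof(kLockedValue),
                                                 [&](const void *, const void *, AuUInt8)
                                                 {
                                                     return TryAcquire();
                                                 });
}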