/***
    Copyright (C) 2023 J Reece Wilson (a/k/a "Reece"). All rights reserved.

    File: AuWakeOnAddress.hpp
    Date: 2023-3-10
    Author: Reece
***/
# pragma once
# include "Primitives/AuWoASemaphore.hpp"
# include "Primitives/AuConditionMutex.Generic.hpp"
# include "Primitives/AuConditionVariable.Generic.hpp"
# include "Primitives/AuSemaphore.Generic.hpp"
// Decorators for the hot wake-on-address code paths.
// On MSVC, __declspec(safebuffers) opts these functions out of the /GS stack
// buffer security checks; auline is the project's inline hint — presumably a
// force-inline macro, confirm against the runtime's common headers.
// WOAFASTPUB additionally applies AUKN_SYM for symbol export across the
// module boundary.
#if defined(AURORA_COMPILER_MSVC)
    #define WOAFAST __declspec(safebuffers) auline
    #define WOAFASTPUB AUKN_SYM __declspec(safebuffers) auline
#else
    #define WOAFAST auline
    #define WOAFASTPUB AUKN_SYM
#endif
2023-03-21 03:18:09 +00:00
namespace Aurora : : Threading
2023-03-12 15:27:28 +00:00
{
2023-12-01 01:35:10 +00:00
static const auto kDefaultWaitPerProcess = 128 ;
2024-03-12 22:50:22 +00:00
static const auto kMax64 = 0xFFFFFFFFFFFFFFFFull ;
2024-04-28 13:13:09 +00:00
static const auto kPlatformFutexNoForcedAlignedU32 = AuBuild : : kIsNTDerived ;
2023-03-12 15:27:28 +00:00
struct WaitState ;
struct WaitBuffer
{
2023-10-30 14:50:28 +00:00
char buffer [ 32 ] ;
2023-03-12 15:27:28 +00:00
AuUInt8 uSize ;
2024-03-12 22:50:22 +00:00
WOAFAST static WaitBuffer From ( const void * pBuf , AuUInt8 uSize ) ;
WOAFAST static bool Compare ( const void * pHotAddress , AuUInt8 uSize , WaitState & state ) ;
WOAFAST static bool Compare ( const void * pHotAddress , AuUInt8 uSize , const void * pCompare , AuUInt64 uMask , EWaitMethod eMethod ) ;
2023-10-30 14:50:28 +00:00
2024-03-12 22:50:22 +00:00
// returns false when valid
template < EWaitMethod eMethod , bool bFast = false >
WOAFAST static bool Compare2 ( const void * pHotAddress , AuUInt8 uSize , const void * pReference , AuUInt64 uMask = 0xFFFFFFFFFFFFFFFF ) ;
2024-06-19 21:05:17 +00:00
template < EWaitMethod eMethod , bool bFast = false >
WOAFAST static bool Compare2 ( const volatile void * pHotAddress , AuUInt8 uSize , const void * pReference , AuUInt64 uMask = 0xFFFFFFFFFFFFFFFF ) ;
2023-03-12 15:27:28 +00:00
} ;
struct WaitState
{
WaitBuffer compare ;
2023-06-15 19:44:27 +00:00
//AuOptionalEx<AuUInt64> qwNanoseconds;
AuOptionalEx < AuUInt64 > qwNanosecondsAbs ;
2024-03-12 22:50:22 +00:00
AuUInt64 uDownsizeMask { 0xFFFFFFFFFFFFFFFF } ;
2023-03-13 23:57:32 +00:00
AuUInt32 uWordSize { } ;
2024-03-02 23:23:16 +00:00
const void * pCompare2 { } ;
2024-03-12 22:50:22 +00:00
EWaitMethod eWaitMethod { EWaitMethod : : eNotEqual } ;
2023-03-12 15:27:28 +00:00
} ;
struct WaitEntry
{
WaitEntry ( ) ;
~ WaitEntry ( ) ;
2024-06-19 21:05:17 +00:00
WaitEntry * volatile pNext { } ;
WaitEntry * volatile pBefore { } ;
2023-06-12 17:31:10 +00:00
2023-03-12 15:27:28 +00:00
// synch
2023-12-10 19:25:31 +00:00
# if defined(WOA_SEMAPHORE_MODE)
2024-01-22 08:32:30 +00:00
# if !defined(WOA_SEMAPHORE_SEMAPHORE)
Primitives : : Semaphore semaphore ;
# else
// Recommended for XNU targets:
WOA_SEMAPHORE_SEMAPHORE semaphore ;
# endif
2023-12-10 19:25:31 +00:00
# else
// Recommended (we can better filter spurious wakes for the cost of a barrier on signal):
2024-01-22 08:32:30 +00:00
// !!! we also prefer to block the containers mutex while we signal each thread individually !!!
// !!! for the sake of optimizing for windows xp - 7, its far nicer to optimize the entire signaling and wait operations under a container lock, than it is to buffer shared pointers or externally managed memory out of the lock scope !!!
// !!! also note: container spinlocks =/= WaitEntry::mutex !!
# if !defined(WOA_CONDVAR_MUTEX)
Primitives : : ConditionMutexInternal mutex ; // mutex ctor must come before var
Primitives : : ConditionVariableInternal variable ; // ...and something all 2007+ micro and monolithic kernels should have; an event or semaphore primitive on which we can form a crude condvar
# else
WOA_CONDVAR_MUTEX mutex ;
WOA_CONDVAR_VARIABLE variable ;
# endif
2023-12-10 19:25:31 +00:00
# endif
2023-03-12 15:27:28 +00:00
// state
const void * pAddress { } ;
AuUInt8 uSize { } ;
2024-03-02 23:23:16 +00:00
const void * pCompareAddress { } ;
2024-03-12 22:50:22 +00:00
EWaitMethod eWaitMethod { EWaitMethod : : eNotEqual } ;
2023-03-12 15:27:28 +00:00
// bookkeeping (parent container)
2024-06-23 03:08:58 +00:00
volatile AuUInt8 bAlive { } ; // wait entry validity. must be rechecked for each spurious or expected wake, if the comparison doesn't break the yield loop.
// if false, and we're still yielding under pCompare == pAddress, we must reschedule with inverse order (as to steal the next signal, as opposed to waiting last)
2023-03-12 15:27:28 +00:00
void Release ( ) ;
2024-03-12 22:50:22 +00:00
template < EWaitMethod eMethod >
2023-03-12 15:27:28 +00:00
bool SleepOn ( WaitState & state ) ;
2023-12-05 07:56:57 +00:00
bool TrySignalAddress ( const void * pAddress ) ;
2023-03-12 15:27:28 +00:00
} ;
2023-06-12 17:31:10 +00:00
struct ProcessListWait
{
WaitEntry * pHead { } ;
WaitEntry * pTail { } ;
} ;
2023-07-30 08:34:39 +00:00
struct ProcessWaitNodeContainer
2023-03-12 15:27:28 +00:00
{
AuUInt32 uAtomic { } ;
2023-06-12 17:31:10 +00:00
ProcessListWait waitList ;
2023-03-12 15:27:28 +00:00
2024-03-12 22:50:22 +00:00
WaitEntry * WaitBufferFrom ( const void * pAddress , AuUInt8 uSize , bool bScheduleFirst , const void * pAddressCompare , EWaitMethod eWaitMethod ) ;
2023-03-12 15:27:28 +00:00
2023-06-12 17:31:10 +00:00
template < typename T >
bool IterateWake ( T callback ) ;
2023-11-17 23:06:08 +00:00
void RemoveSelf ( WaitEntry * pSelf ) ;
2023-12-21 15:54:05 +00:00
void RemoveEntry ( WaitEntry * pSelf , bool bAllUnderLock ) ;
2023-11-17 23:06:08 +00:00
2023-03-12 15:27:28 +00:00
void Lock ( ) ;
void Unlock ( ) ;
} ;
2023-07-30 08:34:39 +00:00
struct ProcessWaitContainer
{
ProcessWaitNodeContainer list [ kDefaultWaitPerProcess ] ;
2024-03-12 22:50:22 +00:00
WaitEntry * WaitBufferFrom ( const void * pAddress , AuUInt8 uSize , bool bScheduleFirst , const void * pAddressCompare , EWaitMethod eWaitMethod ) ;
2023-07-30 08:34:39 +00:00
template < typename T >
2023-08-19 19:37:24 +00:00
bool IterateWake ( const void * pAddress , T callback ) ;
2023-11-17 23:06:08 +00:00
void RemoveSelf ( const void * pAddress , WaitEntry * pSelf ) ;
2023-07-30 08:34:39 +00:00
} ;
2023-03-12 15:27:28 +00:00
}