[*] Polish AuMemory

This commit is contained in:
Reece Wilson 2024-03-19 15:47:42 +00:00
parent 0b60cb8099
commit bd1283e146
32 changed files with 1099 additions and 848 deletions

View File

@ -10,6 +10,8 @@
namespace Aurora::Memory namespace Aurora::Memory
{ {
struct ProxyHeap; struct ProxyHeap;
static const AuUInt8 kHeapSize = 128;
static const AuUInt8 kHeap2Size = 255;
struct Heap struct Heap
{ {
@ -18,73 +20,110 @@ namespace Aurora::Memory
virtual HeapStats &GetStats() = 0; virtual HeapStats &GetStats() = 0;
virtual void WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg) = 0; virtual void WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg) = 0;
// Potentially slower, zero allocate
template<typename T = void *> template<typename T = void *>
T ZAlloc(Types::size_t length) T ZAlloc(Types::size_t uLength)
{ {
return reinterpret_cast<T>(_ZAlloc(length)); if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
{
return reinterpret_cast<T>(_ZAlloc(uLength));
}
else
{
return reinterpret_cast<T>(_ZAlloc(uLength, alignof(AuRemovePointer_t<T>)));
}
} }
template<typename T = void *> template<typename T = void *>
T ZAlloc(Types::size_t length, Types::size_t align) T ZAlloc(Types::size_t uLength, Types::size_t uAlignment)
{ {
return reinterpret_cast<T>(_ZAlloc(length, align)); return reinterpret_cast<T>(_ZAlloc(uLength, uAlignment));
} }
template<typename T> template<typename T>
T *ZAlloc() T *ZAlloc()
{ {
return reinterpret_cast<T *>(_ZAlloc(sizeof(T))); return reinterpret_cast<T *>(_ZAlloc(sizeof(T), alignof(T)));
} }
template<typename T> template<typename T>
T *NewArray(Types::size_t count) T *NewArray(Types::size_t uLength)
{ {
return ZAlloc<T *>(count * sizeof(T)); return ZAlloc<T *>(uLength * sizeof(T), alignof(T));
} }
template<typename T> template<typename T>
T *NewArray(Types::size_t count, Types::size_t align) T *NewArray(Types::size_t uLength, Types::size_t uAlignment)
{ {
return ZAlloc<T *>(count * sizeof(T), align); return ZAlloc<T *>(uLength * sizeof(T), uAlignment);
} }
/// Fast, unsafe alloc /// Fast, unsafe alloc
template<typename T = void *> template<typename T = void *>
T FAlloc(Types::size_t length) T FAlloc(Types::size_t uLength)
{ {
return reinterpret_cast<T>(_FAlloc(length)); if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
{
return reinterpret_cast<T>(_FAlloc(uLength));
}
else
{
return reinterpret_cast<T>(_FAlloc(uLength, alignof(AuRemovePointer_t<T>)));
}
} }
template<typename T = void *> template<typename T = void *>
T FAlloc(Types::size_t length, Types::size_t align) T FAlloc(Types::size_t uLength, Types::size_t uAlignment)
{ {
return reinterpret_cast<T>(_FAlloc(length, align)); return reinterpret_cast<T>(_FAlloc(uLength, uAlignment));
} }
template<typename T> template<typename T>
T ZRealloc(T pHead, Types::size_t length) T *FAlloc()
{ {
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), length)); return reinterpret_cast<T *>(_FAlloc(sizeof(T), alignof(T)));
}
// Reallocs
template<typename T>
T ZRealloc(T pHead, Types::size_t uLength)
{
if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
{
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), uLength));
}
else
{
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), uLength, alignof(AuRemovePointer_t<T>)));
}
} }
template<typename T> template<typename T>
T ZRealloc(T pHead, Types::size_t length, Types::size_t alloc) T ZRealloc(T pHead, Types::size_t uLength, Types::size_t uAlignment)
{ {
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), length), alloc); return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), uLength, uAlignment));
} }
template<typename T> template<typename T>
T FRealloc(T pHead, Types::size_t length) T FRealloc(T pHead, Types::size_t uLength)
{ {
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), length)); if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
{
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), uLength));
}
else
{
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), uLength, alignof(AuRemovePointer_t<T>)));
}
} }
template<typename T> template<typename T>
T FRealloc(T pHead, Types::size_t length, Types::size_t alloc) T FRealloc(T pHead, Types::size_t uLength, Types::size_t uAlignment)
{ {
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), length), alloc); return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), uLength, uAlignment));
} }
// Free
template<typename T> template<typename T>
void Free(T pHead) void Free(T pHead)
{ {
@ -115,12 +154,12 @@ namespace Aurora::Memory
auto pVoids = (void **)(((char *)pThat) - kAlignment); auto pVoids = (void **)(((char *)pThat) - kAlignment);
auto pHeap = (Heap *)pVoids[0]; auto pHeap = (Heap *)pVoids[0];
auto uCount = (AuUInt)pVoids[1]; auto uLength = (AuUInt)pVoids[1];
if constexpr (AuIsClass_v<T> && if constexpr (AuIsClass_v<T> &&
!AuIsTriviallyDestructible_v<T>) !AuIsTriviallyDestructible_v<T>)
{ {
for (AU_ITERATE_N(i, uCount)) for (AU_ITERATE_N(i, uLength))
{ {
auto &refElement = pThat[i]; auto &refElement = pThat[i];
refElement.~T(); refElement.~T();
@ -187,10 +226,20 @@ namespace Aurora::Memory
} }
*(void **)pPtr = pThat; *(void **)pPtr = pThat;
return AuSPtr<T>((T *)(pPtr + kAlignment), &Heap::DeleteThat<T>);
auto pTThat = (T *)(pPtr + kAlignment);
AUROXTL_COMMODITY_TRY
{
return AuSPtr<T>(pTThat, &Heap::DeleteThat<T>);
}
AUROXTL_COMMODITY_CATCH
{
Heap::DeleteThat<T>(pTThat);
return {};
}
} }
// note: callers can use AuHUPOf_t<T> pUniquePointer = AuNullHeapPointer<T>() // note: callers can use AuHUPOf_t<Z> pUniquePointer = AuNullHeapPointer<Z>()
template <class T, class Z = T, class ...Args> template <class T, class Z = T, class ...Args>
AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)> NewClassUnique(Args &&...args) AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)> NewClassUnique(Args &&...args)
@ -288,7 +337,16 @@ namespace Aurora::Memory
pVoids[0] = pThat; pVoids[0] = pThat;
pVoids[1] = (void *)uElements; pVoids[1] = (void *)uElements;
return AuSPtr<T>((T *)(pPtr + kAlignment), &Heap::DeleteThatArray<T>); auto pTThat = (T *)(pPtr + kAlignment);
AUROXTL_COMMODITY_TRY
{
return AuSPtr<T>(pTThat, &Heap::DeleteThatArray<T>);
}
AUROXTL_COMMODITY_CATCH
{
Heap::DeleteThatArray<T>(pTThat);
return {};
}
} }
// note: callers can use AuHUPOf_t<T> pUniquePointer = AuNullHeapPointer<T>() // note: callers can use AuHUPOf_t<T> pUniquePointer = AuNullHeapPointer<T>()
@ -372,31 +430,6 @@ namespace Aurora::Memory
} }
} }
template<typename T>
static AuSPtr<T> ToSmartPointer(AuSPtr<Heap> heap,
T *pHead,
bool bPinHeap = true)
{
auto handle = bPinHeap ?
heap :
AuSPtr<Heap> {};
auto pHeap = heap.get();
return AuSPtr<T>(pHead,
[handle, pHeap](T *pDeleteMe)
{
if constexpr (AuIsClass_v<T>
#if !defined(AURT_HEAP_NO_STL)
&& !std::is_trivially_destructible_v<T>
#endif
)
{
pDeleteMe->~T();
}
pHeap->Free(pDeleteMe);
});
}
template <typename T> template <typename T>
using HUPOf_t = AuUPtr<T, decltype(&Heap::DeleteThat<T>)>; using HUPOf_t = AuUPtr<T, decltype(&Heap::DeleteThat<T>)>;
@ -408,9 +441,9 @@ namespace Aurora::Memory
virtual Heap *GetSelfReferenceRaw() = 0; virtual Heap *GetSelfReferenceRaw() = 0;
virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength) = 0; virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength) = 0;
virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength, Types::size_t align) = 0; virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength, Types::size_t uAlignment) = 0;
virtual AU_ALLOC void *_FAlloc(Types::size_t uLength) = 0; virtual AU_ALLOC void *_FAlloc(Types::size_t uLength) = 0;
virtual AU_ALLOC void *_FAlloc(Types::size_t uLength, Types::size_t align) = 0; virtual AU_ALLOC void *_FAlloc(Types::size_t uLength, Types::size_t uAlignment) = 0;
virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0; virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength) = 0; virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength) = 0;
virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0; virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
@ -432,8 +465,8 @@ namespace Aurora::Memory
}; };
/** /**
Returns a heap interface backed by the default allocator * Returns a heap interface backed by the default allocator
*/ */
AUKN_SHARED_API(DefaultDiscontiguousHeap, Heap); AUKN_SHARED_API(DefaultDiscontiguousHeap, Heap);
inline Heap *GetDefaultDiscontiguousHeap() inline Heap *GetDefaultDiscontiguousHeap()
@ -448,27 +481,28 @@ namespace Aurora::Memory
} }
/** /**
Allocates uLength amount of contiguous virtual memory * Allocates uLength amount of contiguous virtual memory
* @return a heap backed by uLength bytes of virtual memory
* @warning the SOO variant cannot guarantee release-on-last-free and will panic if uLength cannot be allocated. Use AllocHeap[Shared/Unique/New](uLength) instead.
*/
AUKN_SHARED_SOO2_NCM(AllocHeap, Heap, kHeapSize, ((AuUInt, uLength)), AuUInt uLength);
@warning Heaps are guaranteed to outlive their allocations; heaps are the one object that effectively own a single reference count on themselves. /**
Requesting termination before all of its' memory has been free will result, pHead at worst, a warning. * @warning the SOO variant cannot guarantee release-on-last-free.
Expect to leak unless all allocs have been paired by a free. */
AUKN_SHARED_SOO2_NCM(RequestHeapOfRegion, Heap, kHeapSize, ((const MemoryViewWrite &, memory)), const MemoryViewWrite &memory);
I do not expect to implement force frees simply because all our primary use cases keep track of dtors to forcefully release leaked objects. /**
Use RequestHeapOfRegion to be backed by caller owned memory. * Proxies an existing heap with encapsulated statistics.
* This is intended for debugging purposes when accurate heap stats of a heap-subset are desired.
* @warning this heap cannot guarantee release-on-last-free
*/
AUKN_SHARED_SOO2_NCM(HeapProxy, Heap, kHeap2Size, ((const AuSPtr<Heap> &, pHead)), const AuSPtr<Heap> &pHead);
@return a heap backed by uLength bytes of virtual memory /**
* Proxies an existing heap with encapsulated statistics and leak detector
* This is intended for debugging purposes when accurate heap stats of a heap-subset are desired.
* @warning this heap cannot guarantee release-on-last-free
*/ */
AUKN_SHARED_API(AllocHeap, Heap, AuUInt uLength); AUKN_SHARED_SOO2_NCM(HeapProxyEx, Heap, kHeap2Size, ((const AuSPtr<Heap> &,pHead), (LeakFinderAlloc_f, pfAlloc), (LeakFinderFree_f, pfFree)), const AuSPtr<Heap> &pHead, LeakFinderAlloc_f pfAlloc, LeakFinderFree_f pfFree);
AUKN_SHARED_API(RequestHeapOfRegion, Heap, const MemoryViewWrite &memory);
// AllocHeap but use mimalloc (or the default allocator) instead
AUKN_SHARED_API(AllocHeapMimalloc, Heap, AuUInt uLength);
// Proxies an existing heap with encapsulated statistics
AUKN_SHARED_API(HeapProxy, Heap, const AuSPtr<Heap> &pHead);
// Proxies an existing heap with encapsulated statistics and leak detector
AUKN_SHARED_API(HeapProxyEx, Heap, const AuSPtr<Heap> &pHead, LeakFinderAlloc_f pfAlloc, LeakFinderFree_f pfFree);
} }

View File

@ -32,26 +32,26 @@ namespace Aurora::Memory
LeakFinderFree_f pFree); LeakFinderFree_f pFree);
// thread-local // thread-local
AUKN_SYM void SetMemoryLowNotification(MemoryLowNotification_f pFunc); AUKN_SYM void SetMemoryLowNotification(MemoryLowNotification_f pFunc);
AUKN_SYM void ReserveHeapMemory(AuUInt uHeapSize, bool bCommit = true); AUKN_SYM void ReserveHeapMemory(AuUInt uHeapSize, bool bCommit = true);
AUKN_SYM AU_ALLOC void *_ZAlloc(Types::size_t length); AUKN_SYM AU_ALLOC void *_ZAlloc(Types::size_t uLength);
AUKN_SYM AU_ALLOC void *_ZAlloc(Types::size_t length, Types::size_t align); AUKN_SYM AU_ALLOC void *_ZAlloc(Types::size_t uLength, Types::size_t uAlignment);
AUKN_SYM AU_ALLOC void *_FAlloc(Types::size_t length); AUKN_SYM AU_ALLOC void *_FAlloc(Types::size_t uLength);
AUKN_SYM AU_ALLOC void *_FAlloc(Types::size_t length, Types::size_t align); AUKN_SYM AU_ALLOC void *_FAlloc(Types::size_t uLength, Types::size_t uAlignment);
AUKN_SYM AU_ALLOC void *_ZRealloc(void *buffer, Types::size_t length, Types::size_t align); AUKN_SYM AU_ALLOC void *_ZRealloc(void *buffer, Types::size_t uLength, Types::size_t uAlignment);
AUKN_SYM AU_ALLOC void *_ZRealloc(void *buffer, Types::size_t length); AUKN_SYM AU_ALLOC void *_ZRealloc(void *buffer, Types::size_t uLength);
AUKN_SYM AU_ALLOC void *_FRealloc(void *buffer, Types::size_t length, Types::size_t align); AUKN_SYM AU_ALLOC void *_FRealloc(void *buffer, Types::size_t uLength, Types::size_t uAlignment);
AUKN_SYM AU_ALLOC void *_FRealloc(void *buffer, Types::size_t length); AUKN_SYM AU_ALLOC void *_FRealloc(void *buffer, Types::size_t uLength);
AUKN_SYM void _Free(void *buffer); AUKN_SYM void _Free(void *buffer);
AUKN_SYM AuUInt GetChunkSize(const void *head); AUKN_SYM AuUInt GetChunkSize(const void *head);
AUKN_SYM AuUInt GetPageSize(); AUKN_SYM AuUInt GetPageSize();
static void *__FAlloc(Types::size_t length, Types::size_t align) static void *__FAlloc(Types::size_t uLength, Types::size_t uAlignment)
{ {
return _FAlloc(length, align); return _FAlloc(uLength, uAlignment);
} }
static void __Free(void *buffer) static void __Free(void *buffer)
@ -59,89 +59,126 @@ namespace Aurora::Memory
_Free(buffer); _Free(buffer);
} }
// These memory management APIs do not support class types, and will likely *never* support them
// QST already handles dynamic allocation of structs in a given heap properly (afaik)
// _new, new, et al are backed by operator overloads directed towards these functions
// -> do not double init
// TODO: ensure typeof(T) is not a pointer of a class
#if !defined(_CPPSHARP) #if !defined(_CPPSHARP)
template<typename T> template<typename T = void *>
T ZAlloc(Types::size_t length) T ZAlloc(Types::size_t uLength)
{ {
static_assert(!AuIsClass_v<AuRemovePointer_t<T>>, "Do not use heap/kmem apis with classes"); static_assert((!AuIsClass_v<AuRemovePointer_t<T>> || (AuIsTriviallyConstructible_v<AuRemovePointer_t<T>> && AuIsTriviallyDestructible_v<AuRemovePointer_t<T>>)),
return reinterpret_cast<T>(_ZAlloc(length)); "Do not use heap/kmem apis with classes. Use AuNewClass[Unique]");
} if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
template<typename T>
T ZAlloc(Types::size_t length, Types::size_t align)
{
static_assert(!AuIsClass_v<AuRemovePointer_t<T>>, "Do not use heap/kmem apis with classes");
return reinterpret_cast<T>(_ZAlloc(length, align));
}
template<typename T>
T *NewArray(Types::size_t count)
{
static_assert(!AuIsClass_v<T>, "Do not use heap/kmem apis with classes");
return reinterpret_cast<T *>(_FAlloc(count * sizeof(T)));
}
template<typename T>
T *NewArray(Types::size_t count, Types::size_t align)
{
static_assert(!AuIsClass_v<T>, "Do not use heap/kmem apis with classes");
return reinterpret_cast<T *>(_FAlloc(count * sizeof(T)), align);
}
template<typename T>
AuSPtr<T> AllocateFastArray(Types::size_t length, Types::size_t align = sizeof(T))
{
static_assert(!AuIsClass_v<AuRemovePointer_t<T>>, "Do not use heap/kmem apis with classes");
return AuSPtr<T>(reinterpret_cast<T *>(_FAlloc(length)), [](T *ptr)
{ {
_Free(ptr); return reinterpret_cast<T>(_ZAlloc(uLength));
}); }
else
{
return reinterpret_cast<T>(_ZAlloc(uLength, alignof(AuRemovePointer_t<T>)));
}
}
template<typename T = void *>
T ZAlloc(Types::size_t uLength, Types::size_t uAlignment)
{
static_assert((!AuIsClass_v<T> || (AuIsTriviallyConstructible_v<T> && AuIsTriviallyDestructible_v<T>)),
"Do not use heap/kmem apis with classes. Use AuNewClass[Unique] instead");
return reinterpret_cast<T>(_ZAlloc(uLength, uAlignment));
}
template<typename T = void *>
T FAlloc(Types::size_t uLength)
{
static_assert((!AuIsClass_v<AuRemovePointer_t<T>> || (AuIsTriviallyConstructible_v<AuRemovePointer_t<T>> && AuIsTriviallyDestructible_v<AuRemovePointer_t<T>>)),
"Do not use heap/kmem apis with classes. Use AuNewClass[Unique]");
if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
{
return reinterpret_cast<T>(_FAlloc(uLength));
}
else
{
return reinterpret_cast<T>(_FAlloc(uLength, alignof(AuRemovePointer_t<T>)));
}
}
template<typename T = void *>
T FAlloc(Types::size_t uLength, Types::size_t uAlignment)
{
static_assert((!AuIsClass_v<T> || (AuIsTriviallyConstructible_v<T> && AuIsTriviallyDestructible_v<T>)),
"Do not use heap/kmem apis with classes. Use AuNewClass[Unique] instead");
return reinterpret_cast<T>(_FAlloc(uLength, uAlignment));
} }
template<typename T> template<typename T>
T FAlloc(Types::size_t length) T *ZAlloc()
{ {
static_assert(!AuIsClass_v<AuRemovePointer_t<T>>, "Do not use heap/kmem apis with classes"); return ZAlloc<T *>(sizeof(T), alignof(T));
return reinterpret_cast<T>(_FAlloc(length));
} }
template<typename T> template<typename T>
T FAlloc(Types::size_t length, Types::size_t align) T *FAlloc()
{ {
static_assert(!AuIsClass_v<AuRemovePointer_t<T>>, "Do not use heap/kmem apis with classes"); return FAlloc<T *>(sizeof(T), alignof(T));
return reinterpret_cast<T>(_FAlloc(length, align));
} }
template<typename T> template<typename T>
T ZRealloc(T in, Types::size_t length) T *NewArray(Types::size_t uCount)
{ {
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(in), length)); static_assert((!AuIsClass_v<T> || (AuIsTriviallyConstructible_v<T> && AuIsTriviallyDestructible_v<T>)),
"Do not use heap/kmem apis with classes. Use AuNewClassArray[Unique] instead");
return reinterpret_cast<T *>(_FAlloc(uCount * sizeof(T), alignof(T)));
} }
template<typename T> template<typename T>
T ZRealloc(T in, Types::size_t length, Types::size_t alloc) T *NewArray(Types::size_t uCount, Types::size_t uAlignment)
{ {
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(in), length), alloc); static_assert((!AuIsClass_v<T> || (AuIsTriviallyConstructible_v<T> && AuIsTriviallyDestructible_v<T>)),
"Do not use heap/kmem apis with classes. Use AuNewClassArray[Unique] instead");
return reinterpret_cast<T *>(_FAlloc(uCount * sizeof(T), uAlignment));
} }
template<typename T> template<typename T>
T FRealloc(T in, Types::size_t length) AuSPtr<T> AllocateFastArray(Types::size_t uLength, Types::size_t uAlignment = alignof(T))
{ {
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(in), length)); static_assert((!AuIsClass_v<T> || (AuIsTriviallyConstructible_v<T> && AuIsTriviallyDestructible_v<T>)),
"Do not use heap/kmem apis with classes. Use AuNewClassArray instead");
return AuSPtr<T>(reinterpret_cast<T *>(_FAlloc(uLength, uAlignment)), &_Free);
} }
template<typename T> template<typename T>
T FRealloc(T in, Types::size_t length, Types::size_t alloc) T FRealloc(T in, Types::size_t uLength)
{ {
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(in), length), alloc); if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
{
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(in), uLength));
}
else
{
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(in), uLength, alignof(AuRemovePointer_t<T>)));
}
}
template<typename T>
T ZRealloc(T in, Types::size_t uLength)
{
if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
{
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(in), uLength));
}
else
{
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(in), uLength, alignof(AuRemovePointer_t<T>)));
}
}
template<typename T>
T ZRealloc(T in, Types::size_t uLength, Types::size_t uAlignment)
{
return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(in), uLength, uAlignment));
}
template<typename T>
T FRealloc(T in, Types::size_t uLength, Types::size_t uAlignment)
{
return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(in), uLength, uAlignment));
} }
template<typename T> template<typename T>

View File

@ -26,6 +26,11 @@ extern "C"
} }
#endif #endif
namespace Aurora::Memory
{
AuUInt32 RoundPageUp(AuUInt32 value);
}
namespace Aurora namespace Aurora
{ {
static bool gShouldResPathDoNothing {}; static bool gShouldResPathDoNothing {};
@ -778,4 +783,15 @@ namespace Aurora
return length.QuadPart; return length.QuadPart;
} }
void *SysAllocateLarge(AuUInt uLength)
{
uLength = AuMemory::RoundPageUp(uLength);
return VirtualAlloc(nullptr, uLength, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
}
void SysAllocateFree(void *pBuffer, AuUInt uLength)
{
VirtualFree(pBuffer, 0, MEM_RELEASE);
}
} }

View File

@ -8,12 +8,18 @@
#include "RuntimeInternal.hpp" #include "RuntimeInternal.hpp"
#include "AuProcAddresses.UNIX.hpp" #include "AuProcAddresses.UNIX.hpp"
#include <Source/Debug/ExceptionWatcher.Unix.hpp> #include <Source/Debug/ExceptionWatcher.Unix.hpp>
#include <sys/mman.h>
namespace Aurora::Process namespace Aurora::Process
{ {
void PosixForkResetLocks(); void PosixForkResetLocks();
} }
namespace Aurora::Memory
{
AuUInt32 RoundPageUp(AuUInt32 value);
}
namespace Aurora namespace Aurora
{ {
int PosixOpen(const char *pathname, int flags, mode_t mode) int PosixOpen(const char *pathname, int flags, mode_t mode)
@ -159,4 +165,16 @@ namespace Aurora
{ {
return PosixGetLength(uOSHandle); return PosixGetLength(uOSHandle);
} }
void *SysAllocateLarge(AuUInt uLength)
{
uLength = AuMemory::RoundPageUp(uLength);
return ::mmap(0, uLength, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
}
void SysAllocateFree(void *pBuffer, AuUInt uLength)
{
uLength = AuMemory::RoundPageUp(uLength);
::munmap(pBuffer, uLength);
}
} }

View File

@ -6,10 +6,8 @@
Author: Reece Author: Reece
***/ ***/
#include <RuntimeInternal.hpp> #include <RuntimeInternal.hpp>
#include "AuProcAddresses.hpp"
#if defined(AURORA_IS_MODERNNT_DERIVED)
#include "AuProcAddresses.NT.hpp"
#endif
namespace Aurora namespace Aurora
{ {
@ -29,4 +27,18 @@ namespace Aurora
InitLinuxAddresses(); InitLinuxAddresses();
#endif #endif
} }
#if !defined(AURORA_HAS_SYS_ALLOC_LARGE)
void *SysAllocateLarge(AuUInt uLength)
{
return AuMemory::FAlloc<void *>(uLength, 32);
}
void SysAllocateFree(void *pBuffer, AuUInt uLength)
{
AuMemory::Free(pBuffer);
}
#endif
} }

View File

@ -47,4 +47,14 @@ namespace Aurora
void SysWakeAllOnAddress(const void *pAddress); void SysWakeAllOnAddress(const void *pAddress);
AuUInt64 SysGetFileLength(AuUInt uOSHandle); AuUInt64 SysGetFileLength(AuUInt uOSHandle);
}
void *SysAllocateLarge(AuUInt uLength);
void SysAllocateFree(void *pBuffer, AuUInt uLength);
}
#if defined(AURORA_IS_MODERNNT_DERIVED) || defined(AURORA_IS_POSIX_DERIVED)
#if !defined(AURORA_HAS_SYS_ALLOC_LARGE)
#define AURORA_HAS_SYS_ALLOC_LARGE
#endif
#endif

View File

@ -38,7 +38,7 @@
#include "CmdLine/AuCmdLine.hpp" #include "CmdLine/AuCmdLine.hpp"
#include "Grug/AuGrug.hpp" #include "Grug/AuGrug.hpp"
#include "Threading/AuSleep.hpp" #include "Threading/AuSleep.hpp"
#include "Memory/Cache.hpp" #include "Memory/AuMemoryCache.hpp"
#include "Threading/Primitives/SMTYield.hpp" #include "Threading/Primitives/SMTYield.hpp"
#include <mimalloc.h> #include <mimalloc.h>
#include "Process/AuProcessStartTime.hpp" #include "Process/AuProcessStartTime.hpp"

View File

@ -8,27 +8,29 @@
#include <Source/RuntimeInternal.hpp> #include <Source/RuntimeInternal.hpp>
#include "Debug.hpp" #include "Debug.hpp"
#include "ErrorStack.hpp" #include "ErrorStack.hpp"
#include <Source/Memory/Heap.hpp> #include <Source/Memory/AuHeap.hpp>
#include <Source/Memory/AuBaseHeap.hpp>
namespace Aurora::Debug namespace Aurora::Debug
{ {
static thread_local AuSInt tlsMemoryCrunchCounter; static thread_local AuSInt tlsMemoryCrunchCounter;
static auto const kAutoReservePool = 3 * 1024 * 1024; static auto const kDefaultReservePool = 3 * 1024 * 1024;
AuUInt8 *gReservePoolStart {}; // Extern definitions
AuUInt8 *gReservePoolEnd {}; AuUInt8 *gReservePoolStart {};
AuUInt8 *gReservePoolEnd {};
AuSPtr<AuMemory::Heap> gReserveHeap {}; AuSPtr<AuMemory::Heap> gReserveHeap {};
void InitMemoryCrunch() void InitMemoryCrunch()
{ {
gReserveHeap = AuMemory::AllocHeapShared(gRuntimeConfig.debug.uDebugMemoryReserveSize ? gReserveHeap = AuMemory::AllocHeapShared(gRuntimeConfig.debug.uDebugMemoryReserveSize ?
gRuntimeConfig.debug.uDebugMemoryReserveSize : gRuntimeConfig.debug.uDebugMemoryReserveSize :
kAutoReservePool); kDefaultReservePool);
SysAssert(gReserveHeap); SysAssert(gReserveHeap);
auto pHeap = AuStaticCast<AuMemory::BaseHeap>(gReserveHeap); auto pHeap = AuStaticCast<AuMemory::BaseHeap>(gReserveHeap);
gReservePoolStart = AuReinterpretCast<AuUInt8 *>(pHeap->base_); gReservePoolStart = AuReinterpretCast<AuUInt8 *>(pHeap->GetHeapBase());
gReservePoolEnd = AuReinterpretCast<AuUInt8 *>(pHeap->base_) + pHeap->length_; gReservePoolEnd = AuReinterpretCast<AuUInt8 *>(gReservePoolStart) + pHeap->GetHeapLength();
} }
bool IsPointerReserveRange(void *ptr) bool IsPointerReserveRange(void *ptr)

View File

@ -9,7 +9,7 @@
namespace Aurora::HWInfo namespace Aurora::HWInfo
{ {
inline AuUInt32 gPageSize; inline AuUInt32 gPageSize { 4096 }; // harden: specify default value in case of early access
template<typename T> template<typename T>
AuOptional<T> QueryBsdHwStat(int id); AuOptional<T> QueryBsdHwStat(int id);

View File

@ -0,0 +1,28 @@
/***
Copyright (C) 2021-2024 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuBaseHeap.cpp
Date: 2021-6-12
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "AuBaseHeap.hpp"
namespace Aurora::Memory
{
HeapStats &BaseHeap::GetStats()
{
UpdateStats();
return stats;
}
void *BaseHeap::GetHeapBase()
{
return this->pBase_;
}
AuUInt BaseHeap::GetHeapLength()
{
return this->uLength_;
}
}

View File

@ -0,0 +1,28 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuBaseHeap.hpp
Date: 2021-6-12
Author: Reece
***/
#pragma once
namespace Aurora::Memory
{
// Common base for concrete heap implementations: holds the backing-region
// bookkeeping (base pointer + length) and the cached statistics block.
struct BaseHeap : Heap
{
// Refreshes the `stats` member; implemented by each concrete heap.
virtual void UpdateStats() = 0;
// Returns `stats` after invoking UpdateStats().
HeapStats &GetStats() override;
// Base address of the backing region (null while unbound).
void *GetHeapBase();
// Byte length of the backing region.
AuUInt GetHeapLength();
protected:
HeapStats stats;
protected:
void *pBase_ {};
AuUInt uLength_ {};
};
}

View File

@ -0,0 +1,144 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuDefaultHeap.cpp
Date: 2021-6-13
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "AuDefaultHeap.hpp"
#include "AuHeap.hpp"
#include <Source/Debug/MemoryCrunch.hpp>
#include <mimalloc.h>
namespace Aurora::Memory
{
// Heap implementation backed by the process-global allocator. Every
// allocation entry point forwards to the free-standing AuMemory::_*
// functions; the object carries no per-instance state beyond the
// inherited BaseHeap statistics block.

// Carves a child heap out of this heap's storage.
AuSPtr<Heap> DefaultHeap::AllocateDivision(AuUInt32 heap, AuUInt32 alignment)
{
return AllocateDivisionGlobal(this, heap, alignment);
}

// Zero-initializing allocate, default alignment.
void *DefaultHeap::_ZAlloc(Types::size_t length)
{
return AuMemory::_ZAlloc(length);
}

// Zero-initializing allocate with explicit alignment.
void *DefaultHeap::_ZAlloc(Types::size_t length, Types::size_t align)
{
return AuMemory::_ZAlloc(length, align);
}

// Usable size of an allocation owned by this heap.
Types::size_t DefaultHeap::GetChunkSize(const void *head)
{
return AuMemory::GetChunkSize(head);
}

// Fast (non-zeroing) allocate, default alignment.
void *DefaultHeap::_FAlloc(Types::size_t length)
{
return AuMemory::_FAlloc(length);
}

// Fast (non-zeroing) allocate with explicit alignment.
void *DefaultHeap::_FAlloc(Types::size_t length, Types::size_t align)
{
return AuMemory::_FAlloc(length, align);
}

// Zeroing reallocate with explicit alignment.
void *DefaultHeap::_ZRealloc(void *buffer, Types::size_t length, Types::size_t align)
{
return AuMemory::_ZRealloc(buffer, length, align);
}

// Zeroing reallocate, default alignment.
void *DefaultHeap::_ZRealloc(void *buffer, Types::size_t length)
{
return AuMemory::_ZRealloc(buffer, length);
}

// Fast reallocate with explicit alignment.
void *DefaultHeap::_FRealloc(void *buffer, Types::size_t length, Types::size_t align)
{
return AuMemory::_FRealloc(buffer, length, align);
}

// Fast reallocate, default alignment.
void *DefaultHeap::_FRealloc(void *buffer, Types::size_t length)
{
return AuMemory::_FRealloc(buffer, length);
}

// Releases an allocation back to the global allocator.
void DefaultHeap::_Free(void *buffer)
{
return AuMemory::_Free(buffer);
}

// This heap is a process-lifetime singleton, so no owning reference is
// handed out; callers receive an empty shared pointer.
AuSPtr<Heap> DefaultHeap::GetSelfReference()
{
return {};
}

Heap *DefaultHeap::GetSelfReferenceRaw()
{
return this;
}

// Trampoline state used to forward mimalloc's block visitor to the
// caller-supplied WalkHeap callback.
struct WalkInstance
{
bool(*fCallback)(void *, void *);
void *pSecondArg;
};

// mimalloc visitor: skips null blocks and unused areas, then forwards the
// live block to the user callback; the callback returning false stops the walk.
static bool mi_block_visit_funHandler(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *arg)
{
auto pWalkInstance = (WalkInstance *)arg;

if (!block)
{
return true;
}

if (!area->used)
{
return true;
}

// Disabled alternative: visit each sub-block of the area individually.
#if 0
for (AU_ITERATE_N(i, area->used))
{
if (!pWalkInstance->fCallback(((AuUInt *)block) + (i * area->block_size), pWalkInstance->pSecondArg))
{
return false;
}
}
#else
return pWalkInstance->fCallback(block, pWalkInstance->pSecondArg);
#endif

return true;
}

// Walks every live allocation in the default mimalloc heap.
void DefaultHeap::WalkHeap(bool(*fCallback)(void *, void*), void *pSecondArg)
{
WalkInstance inst;
inst.fCallback = fCallback;
inst.pSecondArg = pSecondArg;
mi_heap_visit_blocks(mi_heap_get_default(), true , &mi_block_visit_funHandler, &inst);
}

// Aggregates the global byte counters plus the debug reserve heap's stats
// into this heap's stats block; capacity falls back to system memory info.
void DefaultHeap::UpdateStats()
{
auto other = AuDebug::gReserveHeap->GetStats();
this->stats.bIsSharedWithOtherHeaps = true;
this->stats.uBytesLiveCounter = gBytesCounterAllocated + other.uBytesLiveCounter;
this->stats.uBytesPeakCounter = AuMax(gBytesCounterPeak, other.uBytesPeakCounter);
if (!this->stats.uBytesCapacity)
{
this->stats.uBytesCapacity = Aurora::HWInfo::GetMemStatSystem /*should be process, but process is this with extra steps.*/().value_or(AuHwInfo::RamStat { }).qwAvailable;
}
}

// Process-wide singleton; the New/Release exports below alias it rather
// than allocating.
static DefaultHeap gDefaultAllocation;

AUKN_SYM Heap *DefaultDiscontiguousHeapNew()
{
return &gDefaultAllocation;
}

// No-op: the default heap is static and is never destroyed.
AUKN_SYM void DefaultDiscontiguousHeapRelease(Heap * heap) {}
}

View File

@ -0,0 +1,34 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuDefaultHeap.hpp
Date: 2021-6-13
Author: Reece
***/
#pragma once
#include "AuBaseHeap.hpp"
namespace Aurora::Memory
{
// Heap that forwards every operation to the process-global allocator
// (see AuDefaultHeap.cpp); exposed as a process-lifetime singleton.
struct DefaultHeap : BaseHeap
{
// Carves a child heap out of this heap.
AuSPtr<Heap> AllocateDivision(AuUInt32 heap, AuUInt32 alignment) override;
// Zeroing allocators (default / explicit alignment).
void *_ZAlloc(Types::size_t length) override;
void *_ZAlloc(Types::size_t length, Types::size_t align) override;
// Usable size of an allocation.
Types::size_t GetChunkSize(const void *head) override;
// Fast (non-zeroing) allocators.
void *_FAlloc(Types::size_t length) override;
void *_FAlloc(Types::size_t length, Types::size_t align) override;
// Reallocators (zeroing / fast).
void *_ZRealloc(void *buffer, Types::size_t length, Types::size_t align) override;
void *_ZRealloc(void *buffer, Types::size_t length) override;
void *_FRealloc(void *buffer, Types::size_t length, Types::size_t align) override;
void *_FRealloc(void *buffer, Types::size_t length) override;
void _Free(void *buffer) override;
// Singleton semantics: empty shared pointer / raw this.
AuSPtr<Heap> GetSelfReference() override;
Heap *GetSelfReferenceRaw() override;
// Visits each live allocation in the backing heap.
void WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg) override;
// Refreshes the inherited stats block.
void UpdateStats() override;
};
}

17
Source/Memory/AuHeap.cpp Normal file
View File

@ -0,0 +1,17 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuHeap.cpp
Date: 2021-6-12
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "AuHeap.hpp"
namespace Aurora::Memory
{
// Rounds uLength up to the next multiple of the system page size.
AuUInt32 RoundPageUp(AuUInt32 uLength)
{
return AuPageRoundUp(uLength, HWInfo::GetPageSize());
}
}

18
Source/Memory/AuHeap.hpp Normal file
View File

@ -0,0 +1,18 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuHeap.hpp
Date: 2021-6-12
Author: Reece
***/
#pragma once
#include "AuMemory.hpp"
#include "AuBaseHeap.hpp"
namespace Aurora::Memory
{
// Rounds value up to the next multiple of the system page size.
AuUInt32 RoundPageUp(AuUInt32 value);
// Allocates a child heap of `length` bytes out of `heap`.
AuSPtr<Heap> AllocateDivisionGlobal(Heap *heap, AuUInt32 length, AuUInt32 alignment);
}

View File

@ -0,0 +1,28 @@
/***
Copyright (C) 2021-2024 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuHeapDeletable.cpp
File: AuHeap.cpp
Date: 2021-6-12
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "AuHeapDeletable.hpp"
namespace Aurora::Memory
{
    /**
     * Binds a heap whose backing region was carved out of a parent heap;
     * the region is returned to the parent on destruction.
     *
     * @param parent heap the backing region was allocated from
     * @param ptr    backing region owned by this object
     */
    DeletableHeap::DeletableHeap(Heap *parent, void *ptr) :
        pParent(parent), // BUGFIX: was `pParent(pParent)`, which self-initialized
                         // the member (parameter was named `parent`), leaving the
                         // parent pointer indeterminate
        ptr2_(ptr)
    {
    }

    DeletableHeap::~DeletableHeap()
    {
        // Return the backing region to the parent heap, if one was bound.
        if (this->ptr2_)
        {
            this->pParent->Free(this->ptr2_);
        }
    }
}

View File

@ -0,0 +1,23 @@
/***
Copyright (C) 2021-2024 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuHeapDeletable.hpp
File: Heap.hpp
Date: 2021-6-12
Author: Reece
***/
#pragma once
#include "AuHeapInternal.hpp"
namespace Aurora::Memory
{
// InternalHeap variant whose backing region was allocated out of a parent
// heap; the region is returned to the parent when this heap is destroyed.
struct DeletableHeap : InternalHeap
{
Heap *pParent {}; // heap the backing region came from
void *ptr2_ {}; // backing region; freed via pParent on destruction
DeletableHeap(Heap *pParent, void *ptr);
~DeletableHeap();
};
}

View File

@ -0,0 +1,349 @@
/***
Copyright (C) 2021-2024 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuHeapInternal.cpp
File: AuHeap.cpp
Date: 2021-6-12
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "AuHeap.hpp"
#include "AuHeapInternal.hpp"
#include "AuHeapDeletable.hpp"
typedef struct FragmentHeader
{
void * next;
void * prev;
size_t size;
bool used;
} FragmentHeader;
namespace Aurora::Memory
{
// Default-constructs an uninitialized heap; Init() must be called before use.
// (heap_ and count_ also carry in-class initializers; this list is redundant
// but harmless.)
InternalHeap::InternalHeap() :
heap_(nullptr),
count_(0)
{ }
// Constructs a heap over caller-provided memory (memory.ptr / memory.length).
// Hard-asserts on failure.
// Fix: the SysAssert previously carried no diagnostic message, inconsistent
// with the AuUInt overload below; a message is added.
// NOTE(review): Init() runs inside SysAssert — this assumes SysAssert always
// evaluates its expression (including release builds); confirm.
InternalHeap::InternalHeap(const MemoryViewWrite &memory)
{
    SysAssert(this->Init(memory.length, memory.ptr), "Couldn't initialize inline AuHeap! [invalid memory view]");
}
// Constructs a heap that allocates its own backing pages of uLength bytes.
// Hard-asserts if the backing allocation or o1heap setup fails.
InternalHeap::InternalHeap(AuUInt uLength)
{
SysAssert(this->Init(uLength), "Couldn't initialize inline AuHeap! [OOM]");
}
InternalHeap::~InternalHeap()
{
// Leak check: all allocations should have been returned by now (debug only).
SysAssertDbgExp(this->count_ == 0);
if (this->pBase_)
{
if (this->heap_)
{
// o1heap's control state lives inside pBase_, so dropping the
// pointer is sufficient; there is no separate o1heap teardown here.
this->heap_ = nullptr;
}
if (this->bOwnsMemory_)
{
// Only release the backing pages when Init() allocated them itself;
// adopted regions (Init with ptr) belong to the caller.
SysAllocateFree(this->pBase_, this->uLength_);
this->pBase_ = nullptr;
}
}
}
// Reads the allocation size from the fragment header that sits immediately
// before the user pointer ([-1] steps back one FragmentHeader).
// NOTE(review): relies on o1heap's internal fragment layout matching the
// local FragmentHeader struct above — confirm when upgrading o1heap.
AuUInt InternalHeap::GetHeapSize(const void *ptr)
{
return reinterpret_cast<const FragmentHeader *>(ptr)[-1].size;
}
// One-time initialization.
//  - ptr != nullptr: adopt the caller's region (not owned; never freed here).
//  - ptr == nullptr: allocate uLength bytes (clamped up to a 4096-byte
//    minimum) via SysAllocateLarge and own them.
// Returns false on allocation or o1heap setup failure; on failure with owned
// memory, the destructor releases the pages.
bool InternalHeap::Init(AuUInt uLength, void *ptr)
{
SysAssert(!this->pBase_, "heap already initialized");
SysAssert(uLength, "invalid heap allocation");
if (ptr)
{
this->pBase_ = ptr;
this->uLength_ = uLength;
this->bOwnsMemory_ = false;
}
else
{
// Enforce a minimum backing size of one typical page.
if (uLength <= 4096)
{
uLength = 4096;
}
if (!(this->pBase_ = SysAllocateLarge(uLength)))
{
return false;
}
this->bOwnsMemory_ = true;
this->uLength_ = uLength;
}
// Build the o1heap control block inside the region; its state lives in pBase_.
if (!(this->heap_ = o1heapInit(this->pBase_, uLength)))
{
return false;
}
return true;
}
// Heap interface: the chunk size is the o1heap fragment size recorded for
// this allocation (may exceed the originally requested length).
Types::size_t InternalHeap::GetChunkSize(const void *head)
{
return InternalHeap::GetHeapSize(head);
}
// Carves a child heap of uLength bytes out of this heap.
// Fix: the first parameter was misleadingly named `heap`; it is the byte
// length of the sub-region and is forwarded as such. Renaming a parameter
// is behavior- and ABI-neutral in C++.
AuSPtr<Heap> InternalHeap::AllocateDivision(AuUInt32 uLength, AuUInt32 uAlignment)
{
    return AllocateDivisionGlobal(this, uLength, uAlignment);
}
// Allocates uLength bytes from `heap` and constructs an independent,
// reference-counted heap inside that region.
AuSPtr<Heap> AllocateDivisionGlobal(Heap *heap, AuUInt32 uLength, AuUInt32 alignment)
{
auto ptr = heap->ZAlloc<void *>(uLength, alignment);
if (!ptr)
{
return {};
}
auto ret = AuMakeShared<DeletableHeap>(heap, ptr);
if (!ret)
{
// The DeletableHeap never took ownership; release the region manually.
heap->Free(ptr);
return {};
}
if (!ret->Init(uLength, ptr))
{
// ret's destructor returns ptr to `heap`, so no explicit Free here.
return {};
}
return ret;
}
// Fast (non-zeroing) allocation; bumps the live-allocation counter on success.
void *InternalHeap::_FAlloc(Types::size_t uLength)
{
    auto pInstance = this->heap_;
    if (!pInstance)
    {
        return nullptr;
    }
    void *pAllocation = o1heapAllocate(pInstance, uLength);
    if (!pAllocation)
    {
        return nullptr;
    }
    AuAtomicAdd(&this->count_, 1);
    return pAllocation;
}
// Aligned variant: o1heap returns blocks aligned to O1HEAP_ALIGNMENT; stricter
// alignment requests are rejected (asserted) rather than emulated.
void *InternalHeap::_FAlloc(Types::size_t uLength, Types::size_t uAlign)
{
SysAssert(uAlign <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return this->_FAlloc(uLength);
}
// Zero-initialized allocation: allocate via _FAlloc, then clear the block.
void *InternalHeap::_ZAlloc(Types::size_t uLength)
{
    if (!this->heap_)
    {
        return nullptr;
    }
    if (auto pZeroed = this->_FAlloc(uLength))
    {
        AuMemset(pZeroed, 0, uLength);
        return pZeroed;
    }
    return nullptr;
}
// Aligned variant: as with _FAlloc, alignments beyond O1HEAP_ALIGNMENT are
// rejected rather than emulated.
void *InternalHeap::_ZAlloc(Types::size_t uLength, Types::size_t uAlign)
{
SysAssert(uAlign <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return _ZAlloc(uLength);
}
// Zeroing reallocation. o1heap has no in-place resize, so this is
// allocate-copy-free; the copy length is the smaller of the old fragment size
// and the new request. On allocation failure the old block is left intact.
// Fix: a null pBuffer previously reached GetHeapSize(nullptr), which reads a
// fragment header at [-1] — undefined behavior. Per conventional realloc
// semantics (and matching the global _ZRealloc wrappers elsewhere in this
// module), a null buffer now behaves as a plain allocation.
// NOTE(review): the recorded fragment size may exceed the originally
// requested length, so the copy can include o1heap slack bytes — confirm
// that is acceptable.
void *InternalHeap::_ZRealloc(void *pBuffer, Types::size_t uLength)
{
    if (!pBuffer)
    {
        return this->_ZAlloc(uLength);
    }
    auto prevLength = GetHeapSize(pBuffer);
    auto alloc = this->_ZAlloc(uLength);
    if (!alloc)
    {
        return nullptr;
    }
    AuMemcpy(alloc, pBuffer, AuMin(prevLength, uLength));
    this->_Free(pBuffer);
    return alloc;
}
// Aligned variant: alignment beyond O1HEAP_ALIGNMENT is rejected, then the
// unaligned path is taken.
void *InternalHeap::_ZRealloc(void *pBuffer, Types::size_t uLength, Types::size_t uAlign)
{
SysAssert(uAlign <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return this->_ZRealloc(pBuffer, uLength);
}
// Non-zeroing reallocation via allocate-copy-free (o1heap cannot resize in
// place). On allocation failure the old block is left intact.
// Fix: a null pBuffer previously reached GetHeapSize(nullptr) — undefined
// behavior. Per conventional realloc semantics (and matching the global
// _FRealloc wrappers elsewhere in this module), a null buffer now behaves
// as a plain allocation.
void *InternalHeap::_FRealloc(void *pBuffer, Types::size_t uLength)
{
    if (!pBuffer)
    {
        return this->_FAlloc(uLength);
    }
    auto prevLength = GetHeapSize(pBuffer);
    auto alloc = this->_FAlloc(uLength);
    if (!alloc)
    {
        return nullptr;
    }
    AuMemcpy(alloc, pBuffer, AuMin(prevLength, uLength));
    this->_Free(pBuffer);
    return alloc;
}
// Aligned variant: alignment beyond O1HEAP_ALIGNMENT is rejected, then the
// unaligned path is taken.
void *InternalHeap::_FRealloc(void *pBuffer, Types::size_t uLength, Types::size_t uAlign)
{
SysAssert(uAlign <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return this->_FRealloc(pBuffer, uLength);
}
// Returns a block to o1heap. Null is a no-op (and must not touch the user
// counter). A successful free may delete `this` via DecrementUsers() when
// the heap is dangling, so no members are accessed afterwards.
void InternalHeap::_Free(void *pBuffer)
{
if (pBuffer == nullptr)
{
return;
}
o1heapFree(this->heap_, pBuffer);
DecrementUsers();
}
// Drops one live-allocation reference; when the last allocation is returned,
// a dangling heap (see RequestTermination) tears itself down.
// NOTE(review): assumes AuAtomicSub returns the post-decrement value — confirm.
void InternalHeap::DecrementUsers()
{
if (AuAtomicSub(&this->count_, 1) == 0)
{
TryRelease();
}
}
// Deletes the heap once termination has been requested AND no allocations
// remain; otherwise a no-op.
// NOTE(review): count_ is read non-atomically here; the visible caller is
// DecrementUsers after an atomic sub — confirm that is the only path.
void InternalHeap::TryRelease()
{
if (!this->bIsDangling_)
{
return;
}
if (count_ == 0)
{
delete this;
}
}
// Owner-side teardown. If allocations are still live, the heap is flagged
// dangling and the final _Free() deletes it; otherwise it dies immediately.
void InternalHeap::RequestTermination()
{
if (AuAtomicLoad(&this->count_))
{
SysPushErrorMemory("Heap life was less than its allocations, waiting for final free");
SysPushErrorMemory("Reporting using mayday!");
// Write a crash dump for later review, and do not panic.
// We just have a leak with no sign of corruption
Telemetry::Mayday();
this->bIsDangling_ = true;
}
else
{
delete this;
}
}
// Refreshes the cached HeapStats from o1heap's diagnostics snapshot.
// NOTE(review): assumes heap_ is non-null (i.e. Init succeeded) — confirm
// callers cannot reach this on a failed heap.
void InternalHeap::UpdateStats()
{
auto pDiag = o1heapGetDiagnostics(this->heap_);
this->stats.uBytesLiveCounter = pDiag.allocated;
this->stats.uBytesCapacity = pDiag.capacity;
this->stats.uBytesPeakCounter = pDiag.peak_allocated;
}
// Enumerates heap fragments via o1heap, forwarding each to fCallback with
// pSecondArg as the user context.
void InternalHeap::WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg)
{
o1heapTraverseHeap(this->heap_, fCallback, pSecondArg);
}
// Returns a shared_ptr alias of this heap. shared_from_this throws when the
// instance is not owned by a shared_ptr (e.g. heaps created raw via
// AllocHeapNew); that case is swallowed and an empty pointer is returned.
AuSPtr<Heap> InternalHeap::GetSelfReference()
{
try
{
return AuSharedFromThis();
}
catch (...)
{
return {};
}
}
// Non-owning self reference; lifetime is managed by the caller/owner.
Heap *InternalHeap::GetSelfReferenceRaw()
{
return this;
}
// Creates a self-backed heap of uSize bytes; returns null on OOM or o1heap
// setup failure. Release with AllocHeapRelease.
AUKN_SYM Heap *AllocHeapNew(AuUInt uSize)
{
    auto pNewHeap = _new InternalHeap();
    if (!pNewHeap)
    {
        return nullptr;
    }
    if (pNewHeap->Init(uSize, nullptr))
    {
        return pNewHeap;
    }
    delete pNewHeap;
    return nullptr;
}
// Releases a heap from AllocHeapNew; deletion is deferred until outstanding
// allocations drain (see RequestTermination).
AUKN_SYM void AllocHeapRelease(Heap *pHeap)
{
static_cast<InternalHeap *>(pHeap)->RequestTermination();
}
// Creates a heap over a caller-owned region; the region is never freed by
// the heap. Returns null on an invalid view, OOM, or o1heap setup failure.
AUKN_SYM Heap *RequestHeapOfRegionNew(const MemoryViewWrite &memory)
{
    // Reject empty/invalid views up front.
    if (!memory)
    {
        SysPushErrorArg();
        return nullptr;
    }
    auto pRegionHeap = _new InternalHeap();
    if (!pRegionHeap)
    {
        return nullptr;
    }
    if (pRegionHeap->Init(memory.length, memory.ptr))
    {
        return pRegionHeap;
    }
    delete pRegionHeap;
    return nullptr;
}
// Releases a heap from RequestHeapOfRegionNew; deletion is deferred until
// outstanding allocations drain (see RequestTermination).
AUKN_SYM void RequestHeapOfRegionRelease(Heap *pHeap)
{
static_cast<InternalHeap *>(pHeap)->RequestTermination();
}
AUROXTL_INTERFACE_SOO_SRC_EX(AURORA_SYMBOL_EXPORT, RequestHeapOfRegion, InternalHeap, (const MemoryViewWrite &, memory))
AUROXTL_INTERFACE_SOO_SRC_EX(AURORA_SYMBOL_EXPORT, AllocHeap, InternalHeap, (AuUInt, uLength))
}

View File

@ -0,0 +1,64 @@
/***
Copyright (C) 2021-2024 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: AuHeapInternal.hpp
File: Heap.hpp
Date: 2021-6-12
Author: Reece
***/
#pragma once
#include "AuMemory.hpp"
#include "AuBaseHeap.hpp"
#include "o1heap.hpp"
namespace Aurora::Memory
{
// o1heap-backed Heap implementation. Tracks live allocations (count_) so a
// released-but-still-used heap can defer its own deletion until the final
// free (see RequestTermination / DecrementUsers / TryRelease).
struct InternalHeap :
BaseHeap,
AuEnableSharedFromThis<InternalHeap>
{
virtual AuSPtr<Heap> AllocateDivision(AuUInt32 heap, AuUInt32 alignment) override;
InternalHeap();
InternalHeap(const MemoryViewWrite &memory);
InternalHeap(AuUInt uLength);
virtual ~InternalHeap();
// Adopts ptr (not owned) or, when ptr is null, allocates uLength bytes (owned).
bool Init(AuUInt uLength, void *ptr = nullptr);
// Fragment size recorded in the header immediately preceding a user pointer.
static AuUInt GetHeapSize(const void *ptr);
Types::size_t GetChunkSize(const void *head) override;
void *_FAlloc(Types::size_t uLength) override;
void *_FAlloc(Types::size_t uLength, Types::size_t uAlign) override;
void *_ZAlloc(Types::size_t uLength) override;
void *_ZAlloc(Types::size_t uLength, Types::size_t uAlign) override;
void *_ZRealloc(void *pBuffer, Types::size_t uLength) override;
void *_ZRealloc(void *pBuffer, Types::size_t uLength, Types::size_t uAlign) override;
void *_FRealloc(void *pBuffer, Types::size_t uLength) override;
void *_FRealloc(void *pBuffer, Types::size_t uLength, Types::size_t uAlign) override;
void _Free(void *pBuffer) override;
AuSPtr<Heap> GetSelfReference() override;
Heap *GetSelfReferenceRaw() override;
// Deferred-teardown machinery: delete once dangling and count_ reaches zero.
void TryRelease();
void DecrementUsers();
void RequestTermination();
void UpdateStats() override;
void WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg) override;
private:
O1HeapInstance *heap_ {};  // o1heap control block, lives inside pBase_
int count_ {};             // live allocation count
bool bIsDangling_ {};      // termination requested while allocations remain
protected:
bool bOwnsMemory_ {};      // true when Init() allocated the backing pages
};
}

View File

@ -1,14 +1,13 @@
/*** /***
Copyright (C) 2024 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2024 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: HeapProxy.cpp File: AuHeapProxy.cpp
Date: 2024-1-16 Date: 2024-1-16
Author: Reece Author: Reece
***/ ***/
#include <Source/RuntimeInternal.hpp> #include <Source/RuntimeInternal.hpp>
#include "Memory.hpp" #include "AuHeap.hpp"
#include "Heap.hpp" #include "AuHeapProxy.hpp"
#include "HeapProxy.hpp"
namespace Aurora::Memory namespace Aurora::Memory
{ {
@ -58,9 +57,9 @@ namespace Aurora::Memory
} }
} }
void *ProxyHeap::_ZAlloc(Types::size_t uLength, Types::size_t align) void *ProxyHeap::_ZAlloc(Types::size_t uLength, Types::size_t uAlign)
{ {
if (auto pThat = this->pHeap->ZAlloc(uLength, align)) if (auto pThat = this->pHeap->ZAlloc(uLength, uAlign))
{ {
auto uLengthCurrent = this->GetChunkSize(pThat); auto uLengthCurrent = this->GetChunkSize(pThat);
AuAtomicAdd(&this->uBytesLifetime, uLengthCurrent); AuAtomicAdd(&this->uBytesLifetime, uLengthCurrent);
@ -103,9 +102,9 @@ namespace Aurora::Memory
} }
} }
void *ProxyHeap::_FAlloc(Types::size_t uLength, Types::size_t align) void *ProxyHeap::_FAlloc(Types::size_t uLength, Types::size_t uAlign)
{ {
if (auto pThat = this->pHeap->_FAlloc(uLength, align)) if (auto pThat = this->pHeap->_FAlloc(uLength, uAlign))
{ {
auto uLengthCurrent = this->GetChunkSize(pThat); auto uLengthCurrent = this->GetChunkSize(pThat);
AuAtomicAdd(&this->uBytesLifetime, uLengthCurrent); AuAtomicAdd(&this->uBytesLifetime, uLengthCurrent);
@ -123,13 +122,13 @@ namespace Aurora::Memory
} }
} }
void *ProxyHeap::_ZRealloc(void *pHead, Types::size_t uLength, Types::size_t align) void *ProxyHeap::_ZRealloc(void *pHead, Types::size_t uLength, Types::size_t uAlign)
{ {
if (pHead) if (pHead)
{ {
auto uLengthCurrent = this->GetChunkSize(pHead); auto uLengthCurrent = this->GetChunkSize(pHead);
if (auto pThat = this->pHeap->_ZRealloc(pHead, uLength, align)) if (auto pThat = this->pHeap->_ZRealloc(pHead, uLength, uAlign))
{ {
auto uLengthNext = this->GetChunkSize(pThat); auto uLengthNext = this->GetChunkSize(pThat);
AuAtomicAdd(&this->uBytesFree, uLengthCurrent); AuAtomicAdd(&this->uBytesFree, uLengthCurrent);
@ -185,13 +184,13 @@ namespace Aurora::Memory
} }
} }
void *ProxyHeap::_FRealloc(void *pHead, Types::size_t uLength, Types::size_t align) void *ProxyHeap::_FRealloc(void *pHead, Types::size_t uLength, Types::size_t uAlign)
{ {
if (pHead) if (pHead)
{ {
auto uLengthCurrent = this->GetChunkSize(pHead); auto uLengthCurrent = this->GetChunkSize(pHead);
if (auto pThat = this->pHeap->_FRealloc(pHead, uLength, align)) if (auto pThat = this->pHeap->_FRealloc(pHead, uLength, uAlign))
{ {
auto uLengthNext = this->GetChunkSize(pThat); auto uLengthNext = this->GetChunkSize(pThat);
AuAtomicAdd(&this->uBytesFree, uLengthCurrent); AuAtomicAdd(&this->uBytesFree, uLengthCurrent);
@ -329,4 +328,8 @@ namespace Aurora::Memory
{ {
AuSafeDelete<ProxyHeap *>(heap); AuSafeDelete<ProxyHeap *>(heap);
} }
AUROXTL_INTERFACE_SOO_SRC_EX(AURORA_SYMBOL_EXPORT, HeapProxy, ProxyHeap, (const AuSPtr<Heap> &, pHead))
AUROXTL_INTERFACE_SOO_SRC_EX(AURORA_SYMBOL_EXPORT, HeapProxyEx, ProxyHeap, (const AuSPtr<Heap> &, pHead), (LeakFinderAlloc_f, pfAlloc), (LeakFinderFree_f, pfFree))
} }

View File

@ -1,7 +1,7 @@
/*** /***
Copyright (C) 2024 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2024 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: HeapProxy.hpp File: AuHeapProxy.hpp
Date: 2024-1-16 Date: 2024-1-16
Author: Reece Author: Reece
***/ ***/

View File

@ -1,19 +1,14 @@
/*** /***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: Memory.cpp File: AuMemory.cpp
Date: 2021-6-12 Date: 2021-6-12
Author: Reece Author: Reece
***/ ***/
#include <Source/RuntimeInternal.hpp> #include <Source/RuntimeInternal.hpp>
#include "Memory.hpp" #include "AuMemory.hpp"
#include <mimalloc.h> #include <mimalloc.h>
#include <Source/HWInfo/AuHWInfo.hpp> #include <Source/HWInfo/AuHWInfo.hpp>
#if defined(AURORA_IS_LINUX_DERIVED)
#include <sys/mman.h>
#endif
#include <Source/Debug/MemoryCrunch.hpp> #include <Source/Debug/MemoryCrunch.hpp>
#include <Source/Debug/ErrorStack.hpp> #include <Source/Debug/ErrorStack.hpp>
@ -23,10 +18,10 @@ namespace Aurora::Memory
AuUInt gBytesCounterPeak {}; AuUInt gBytesCounterPeak {};
thread_local AuInt64 tlsLastOutOfMemory {}; thread_local AuInt64 tlsLastOutOfMemory {};
thread_local MemoryLowNotification_f tlsMemoryLowNotification;
static LeakFinderAlloc_f gLeakFinderAlloc; static LeakFinderAlloc_f gLeakFinderAlloc;
static LeakFinderFree_f gLeakFinderFree; static LeakFinderFree_f gLeakFinderFree;
thread_local MemoryLowNotification_f tlsMemoryLowNotification;
static void AddBytesToCounter(AuUInt uBytes) static void AddBytesToCounter(AuUInt uBytes)
{ {
@ -148,8 +143,7 @@ namespace Aurora::Memory
AUKN_SYM void *_ZRealloc(void *buffer, Types::size_t length, Types::size_t align) AUKN_SYM void *_ZRealloc(void *buffer, Types::size_t length, Types::size_t align)
{ {
SysAssertDbg(length); void *pRet = nullptr;
void *pRet;
if (AuDebug::IsPointerReserveRange(buffer)) if (AuDebug::IsPointerReserveRange(buffer))
{ {
@ -159,9 +153,15 @@ namespace Aurora::Memory
{ {
auto oldLen = ::mi_malloc_size(buffer); auto oldLen = ::mi_malloc_size(buffer);
pRet = ::mi_rezalloc_aligned(buffer, length, align); if (buffer && !length)
{
if (pRet) ::mi_free(buffer);
}
else if (!buffer && length)
{
return _ZAlloc(length, align);
}
else if ((pRet = ::mi_rezalloc_aligned(buffer, length, align)))
{ {
auto uNewSize = ::mi_malloc_size(pRet); auto uNewSize = ::mi_malloc_size(pRet);
@ -178,14 +178,12 @@ namespace Aurora::Memory
gLeakFinderAlloc(pRet, uNewSize); gLeakFinderAlloc(pRet, uNewSize);
} }
} }
else if (buffer && length)
{
OnOOM(length);
}
} }
if (!pRet) if (!pRet && buffer && length)
{ {
OnOOM(length);
if (gRuntimeConfig.debug.bIsMemoryErrorFatal) if (gRuntimeConfig.debug.bIsMemoryErrorFatal)
{ {
SysPanic("ZAllocEx out of memory"); SysPanic("ZAllocEx out of memory");
@ -197,8 +195,7 @@ namespace Aurora::Memory
AUKN_SYM void *_ZRealloc(void *buffer, Types::size_t length) AUKN_SYM void *_ZRealloc(void *buffer, Types::size_t length)
{ {
SysAssertDbg(length); void *pRet = nullptr;
void *pRet;
if (AuDebug::IsPointerReserveRange(buffer)) if (AuDebug::IsPointerReserveRange(buffer))
{ {
@ -208,9 +205,15 @@ namespace Aurora::Memory
{ {
auto oldLen = ::mi_malloc_size(buffer); auto oldLen = ::mi_malloc_size(buffer);
pRet = ::mi_rezalloc(buffer, length); if (buffer && !length)
{
if (pRet) ::mi_free(buffer);
}
else if (!buffer && length)
{
return _ZAlloc(length);
}
else if ((pRet = ::mi_rezalloc(buffer, length)))
{ {
auto uNewSize = ::mi_malloc_size(pRet); auto uNewSize = ::mi_malloc_size(pRet);
@ -227,14 +230,12 @@ namespace Aurora::Memory
gLeakFinderAlloc(pRet, uNewSize); gLeakFinderAlloc(pRet, uNewSize);
} }
} }
else if (buffer && length)
{
OnOOM(length);
}
} }
if (!pRet) if (!pRet && buffer && length)
{ {
OnOOM(length);
if (gRuntimeConfig.debug.bIsMemoryErrorFatal) if (gRuntimeConfig.debug.bIsMemoryErrorFatal)
{ {
SysPanic("ZAlloc out of memory"); SysPanic("ZAlloc out of memory");
@ -246,8 +247,7 @@ namespace Aurora::Memory
AUKN_SYM void *_FRealloc(void *buffer, Types::size_t length, Types::size_t align) AUKN_SYM void *_FRealloc(void *buffer, Types::size_t length, Types::size_t align)
{ {
SysAssertDbg(length); void *pRet = nullptr;
void *pRet;
if (AuDebug::IsPointerReserveRange(buffer)) if (AuDebug::IsPointerReserveRange(buffer))
{ {
@ -257,9 +257,15 @@ namespace Aurora::Memory
{ {
auto oldLen = ::mi_malloc_size(buffer); auto oldLen = ::mi_malloc_size(buffer);
pRet = ::mi_realloc_aligned(buffer, length, align); if (buffer && !length)
{
if (pRet) ::mi_free(buffer);
}
else if (!buffer && length)
{
return _FAlloc(length, align);
}
else if ((pRet = ::mi_realloc_aligned(buffer, length, align)))
{ {
auto uNewSize = ::mi_malloc_size(pRet); auto uNewSize = ::mi_malloc_size(pRet);
@ -276,14 +282,12 @@ namespace Aurora::Memory
gLeakFinderAlloc(pRet, uNewSize); gLeakFinderAlloc(pRet, uNewSize);
} }
} }
else if (buffer && length)
{
OnOOM(length);
}
} }
if (!pRet) if (!pRet && buffer && length)
{ {
OnOOM(length);
if (gRuntimeConfig.debug.bIsMemoryErrorFatal) if (gRuntimeConfig.debug.bIsMemoryErrorFatal)
{ {
SysPanic("FReallocEx out of memory"); SysPanic("FReallocEx out of memory");
@ -295,8 +299,7 @@ namespace Aurora::Memory
AUKN_SYM void *_FRealloc(void *buffer, Types::size_t length) AUKN_SYM void *_FRealloc(void *buffer, Types::size_t length)
{ {
SysAssertDbg(length); void *pRet = nullptr;
void *pRet;
if (AuDebug::IsPointerReserveRange(buffer)) if (AuDebug::IsPointerReserveRange(buffer))
{ {
@ -306,9 +309,15 @@ namespace Aurora::Memory
{ {
auto oldLen = ::mi_malloc_size(buffer); auto oldLen = ::mi_malloc_size(buffer);
pRet = ::mi_realloc(buffer, length); if (buffer && !length)
{
if (pRet) ::mi_free(buffer);
}
else if (!buffer && length)
{
return _FAlloc(length);
}
else if ((pRet = ::mi_realloc(buffer, length)))
{ {
auto uNewSize = ::mi_malloc_size(pRet); auto uNewSize = ::mi_malloc_size(pRet);
@ -325,14 +334,12 @@ namespace Aurora::Memory
gLeakFinderAlloc(pRet, uNewSize); gLeakFinderAlloc(pRet, uNewSize);
} }
} }
else if (buffer && length)
{
OnOOM(length);
}
} }
if (!pRet) if (!pRet && buffer && length)
{ {
OnOOM(length);
if (gRuntimeConfig.debug.bIsMemoryErrorFatal) if (gRuntimeConfig.debug.bIsMemoryErrorFatal)
{ {
SysPanic("FRealloc out of memory"); SysPanic("FRealloc out of memory");
@ -360,7 +367,6 @@ namespace Aurora::Memory
RemoveBytesFromCounter(uCount); RemoveBytesFromCounter(uCount);
::mi_free(pHead); ::mi_free(pHead);
if (gLeakFinderFree) if (gLeakFinderFree)
{ {
gLeakFinderFree(pHead, uCount); gLeakFinderFree(pHead, uCount);

View File

@ -1,12 +1,14 @@
/*** /***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: Memory.hpp File: AuMemory.hpp
Date: 2021-6-12 Date: 2021-6-12
Author: Reece Author: Reece
***/ ***/
#pragma once #pragma once
#include "AuMemory.hpp"
namespace Aurora::Memory namespace Aurora::Memory
{ {
extern AuUInt gBytesCounterAllocated; extern AuUInt gBytesCounterAllocated;

View File

@ -1,13 +1,13 @@
/*** /***
Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: Cache.cpp File: AuMemoryCache.cpp
Date: 2022-3-21 Date: 2022-3-21
Author: Reece Author: Reece
***/ ***/
#include <Source/RuntimeInternal.hpp> #include <Source/RuntimeInternal.hpp>
#include "Memory.hpp" #include "AuMemory.hpp"
#include "Cache.hpp" #include "AuMemoryCache.hpp"
#include <Source/HWInfo/AuHWInfo.hpp> #include <Source/HWInfo/AuHWInfo.hpp>
#define LINUX_SUPPORTS_CACHE_CTL 0 #define LINUX_SUPPORTS_CACHE_CTL 0

View File

@ -1,7 +1,7 @@
/*** /***
Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: Cache.hpp File: AuMemoryCache.hpp
Date: 2022-3-21 Date: 2022-3-21
Author: Reece Author: Reece
***/ ***/

View File

@ -1,13 +1,13 @@
/*** /***
Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: SwapLock.cpp File: AuMemorySwapLock.cpp
Date: 2022-3-21 Date: 2022-3-21
Author: Reece Author: Reece
***/ ***/
#include <Source/RuntimeInternal.hpp> #include <Source/RuntimeInternal.hpp>
#include "Memory.hpp" #include "AuMemory.hpp"
#include "SwapLock.hpp" #include "AuMemorySwapLock.hpp"
#include <Source/HWInfo/AuHWInfo.hpp> #include <Source/HWInfo/AuHWInfo.hpp>
#if defined(AURORA_IS_LINUX_DERIVED) #if defined(AURORA_IS_LINUX_DERIVED)

View File

@ -1,7 +1,7 @@
/*** /***
Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved. Copyright (C) 2022 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: SwapLock.hpp File: AuMemorySwapLock.hpp
Date: 2022-3-21 Date: 2022-3-21
Author: Reece Author: Reece
***/ ***/

View File

@ -1,148 +0,0 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: DefaultHeap.cpp
Date: 2021-6-13
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "Memory.hpp"
#include "DefaultHeap.hpp"
#include "Heap.hpp"
#include <Source/Debug/MemoryCrunch.hpp>
#include <mimalloc.h>
namespace Aurora::Memory
{
struct DefaultHeap : BaseHeap
{
AuSPtr<Heap> AllocateDivision(AuUInt32 heap, AuUInt32 alignment) override
{
return AllocateDivisionGlobal(this, heap, alignment);
}
void *_ZAlloc(Types::size_t length) override
{
return Aurora::Memory::_ZAlloc(length);
}
void *_ZAlloc(Types::size_t length, Types::size_t align) override
{
return Aurora::Memory::_ZAlloc(length, align);
}
Types::size_t GetChunkSize(const void *head) override
{
return Aurora::Memory::GetChunkSize(head);
}
void *_FAlloc(Types::size_t length) override
{
return Aurora::Memory::_FAlloc(length);
}
void *_FAlloc(Types::size_t length, Types::size_t align) override
{
return Aurora::Memory::_FAlloc(length, align);
}
void *_ZRealloc(void *buffer, Types::size_t length, Types::size_t align) override
{
return Aurora::Memory::_ZRealloc(buffer, length, align);
}
void *_ZRealloc(void *buffer, Types::size_t length) override
{
return Aurora::Memory::_ZRealloc(buffer, length);
}
void *_FRealloc(void *buffer, Types::size_t length, Types::size_t align) override
{
return Aurora::Memory::_FRealloc(buffer, length, align);
}
void *_FRealloc(void *buffer, Types::size_t length) override
{
return Aurora::Memory::_FRealloc(buffer, length);
}
void _Free(void *buffer) override
{
return Aurora::Memory::_Free(buffer);
}
AuSPtr<Heap> GetSelfReference() override
{
return {};
}
Heap *GetSelfReferenceRaw() override
{
return this;
}
struct WalkInstance
{
bool(*fCallback)(void *, void *);
void *pSecondArg;
};
static bool mi_block_visit_funHandler(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *arg)
{
auto pWalkInstance = (WalkInstance *)arg;
if (!block)
{
return true;
}
if (!area->used)
{
return true;
}
#if 0
for (AU_ITERATE_N(i, area->used))
{
if (!pWalkInstance->fCallback(((AuUInt *)block) + (i * area->block_size), pWalkInstance->pSecondArg))
{
return false;
}
}
#else
return pWalkInstance->fCallback(block, pWalkInstance->pSecondArg);
#endif
return true;
}
void WalkHeap(bool(*fCallback)(void *, void*), void *pSecondArg) override
{
WalkInstance inst;
inst.fCallback = fCallback;
inst.pSecondArg = pSecondArg;
mi_heap_visit_blocks(mi_heap_get_default(), true , &mi_block_visit_funHandler, &inst);
}
void UpdateStats() override
{
auto other = AuDebug::gReserveHeap->GetStats();
this->stats.bIsSharedWithOtherHeaps = true;
this->stats.uBytesLiveCounter = gBytesCounterAllocated + other.uBytesLiveCounter;
this->stats.uBytesPeakCounter = AuMax(gBytesCounterPeak, other.uBytesPeakCounter);
if (!this->stats.uBytesCapacity)
{
this->stats.uBytesCapacity = Aurora::HWInfo::GetMemStatSystem /*should be process, but process is this with extra steps.*/().value_or(AuHwInfo::RamStat { }).qwAvailable;
}
}
};
static DefaultHeap gDefaultAllocation;
AUKN_SYM Heap *DefaultDiscontiguousHeapNew()
{
return &gDefaultAllocation;
}
AUKN_SYM void DefaultDiscontiguousHeapRelease(Heap * heap) {}
}

View File

@ -1,12 +0,0 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: DefaultHeap.hpp
Date: 2021-6-13
Author: Reece
***/
#pragma once
namespace Aurora::Memory
{
}

View File

@ -1,432 +0,0 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: Heap.cpp
Date: 2021-6-12
Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "Memory.hpp"
#include "Heap.hpp"
#include "mimalloc.h"
#include "o1heap.hpp"
#if defined(AURORA_IS_POSIX_DERIVED)
#include <sys/mman.h>
#endif
namespace Aurora::Memory
{
static AuUInt32 RoundPageUp(AuUInt32 value)
{
return AuPageRoundUp(value, HWInfo::GetPageSize());
}
static void *HeapLargeAllocate(AuUInt length)
{
length = RoundPageUp(length);
#if defined(AURORA_IS_MODERNNT_DERIVED)
return VirtualAlloc(nullptr, length, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
#elif defined(AURORA_IS_POSIX_DERIVED)
return mmap(0, length, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
#else
// ideally we should page align.
// i think mimalloc has fast paths with warnings for overly large passthrough allocations. unsure.
// 32 alignment in the fastest way mimalloc can provide us memory seems adequate
// it's very easy for mimalloc to seethe at larger allocations, but it does have slowpaths to handle them
return AuMemory::FAlloc<void *>(length, 32);
#endif
}
static void HeapLargeFree(void *buffer, AuUInt length)
{
length = RoundPageUp(length);
#if defined(AURORA_IS_MODERNNT_DERIVED)
VirtualFree(buffer, 0, MEM_RELEASE);
#elif defined(AURORA_IS_POSIX_DERIVED)
munmap(buffer, length);
#else
AuMemory::Free(buffer);
mi_collect(false);
#endif
}
struct InternalHeap : BaseHeap, AuEnableSharedFromThis<InternalHeap>
{
virtual AuSPtr<Heap> AllocateDivision(AuUInt32 heap, AuUInt32 alignment) override;
InternalHeap() : heap_(nullptr), count_(0)
{ }
virtual ~InternalHeap();
bool ownsMemory_ {};
bool Init(AuUInt length, void *ptr = nullptr);
typedef struct FragmentHeader
{
void *next;
void *prev;
size_t size;
bool used;
} FragmentHeader;
static AuUInt GetHeapSize(const void *ptr)
{
return reinterpret_cast<const FragmentHeader *>(ptr)[-1].size;
}
Types::size_t GetChunkSize(const void *head) override;
void *_FAlloc(Types::size_t length) override;
void *_FAlloc(Types::size_t length, Types::size_t align) override;
void *_ZAlloc(Types::size_t length) override;
void *_ZAlloc(Types::size_t length, Types::size_t align) override;
void *_ZRealloc(void *buffer, Types::size_t length) override;
void *_ZRealloc(void *buffer, Types::size_t length, Types::size_t align) override;
void *_FRealloc(void *buffer, Types::size_t length) override;
void *_FRealloc(void *buffer, Types::size_t length, Types::size_t align) override;
void _Free(void *buffer) override;
AuSPtr<Heap> GetSelfReference() override;
Heap *GetSelfReferenceRaw() override;
void TryRelease();
void DecrementUsers();
void RequestTermination();
void UpdateStats() override;
void WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg) override;
private:
O1HeapInstance *heap_ {};
int count_ {};
bool isDangling_ {};
};
struct DeletableHeap : InternalHeap
{
Heap *parent {};
void *ptr2_ {};
DeletableHeap(Heap *parent, void *ptr);
~DeletableHeap();
};
DeletableHeap::DeletableHeap(Heap *parent, void *ptr) : parent(parent), ptr2_(ptr)
{
}
DeletableHeap::~DeletableHeap()
{
if (this->ptr2_)
{
parent->Free(this->ptr2_);
}
}
InternalHeap::~InternalHeap()
{
SysAssertDbgExp(count_ == 0);
if (this->base_)
{
if (this->heap_)
{
this->heap_ = nullptr;
}
if (this->ownsMemory_)
{
HeapLargeFree(this->base_, this->length_);
this->base_ = nullptr;
}
}
}
bool InternalHeap::Init(AuUInt length, void *ptr)
{
SysAssert(!this->base_, "heap already initialized");
SysAssert(length, "invalid heap allocation");
if (ptr)
{
this->base_ = ptr;
this->length_ = length;
this->ownsMemory_ = false;
}
else
{
if (length <= 4096)
{
length = 4086;
}
if (!(this->base_ = HeapLargeAllocate(length)))
{
return false;
}
this->ownsMemory_ = true;
this->length_ = length;
}
if (!(this->heap_ = o1heapInit(this->base_, length)))
{
return false;
}
return true;
}
Types::size_t InternalHeap::GetChunkSize(const void *head)
{
return InternalHeap::GetHeapSize(head);
}
AuSPtr<Heap> InternalHeap::AllocateDivision(AuUInt32 heap, AuUInt32 alignment)
{
return AllocateDivisionGlobal(this, heap, alignment);
}
AuSPtr<Heap> AllocateDivisionGlobal(Heap *heap, AuUInt32 length, AuUInt32 alignment)
{
auto ptr = heap->ZAlloc<void *>(length, alignment);
if (!ptr)
{
return {};
}
auto ret = AuMakeShared<DeletableHeap>(heap, ptr);
if (!ret)
{
heap->Free(ptr);
return {};
}
if (!ret->Init(length, ptr))
{
return {};
}
return ret;
}
void *InternalHeap::_FAlloc(Types::size_t length)
{
if (!this->heap_)
{
return nullptr;
}
auto ret = o1heapAllocate(this->heap_, length);
if (ret)
{
AuAtomicAdd(&this->count_, 1);
}
return ret;
}
void *InternalHeap::_FAlloc(Types::size_t length, Types::size_t align)
{
SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return this->_FAlloc(length);
}
void *InternalHeap::_ZAlloc(Types::size_t length)
{
if (!this->heap_)
{
return nullptr;
}
auto ptr = this->_FAlloc(length);
if (!ptr)
{
return nullptr;
}
AuMemset(ptr, 0, length);
return ptr;
}
void *InternalHeap::_ZAlloc(Types::size_t length, Types::size_t align)
{
SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return _ZAlloc(length);
}
void *InternalHeap::_ZRealloc(void *buffer, Types::size_t length)
{
auto prevLength = GetHeapSize(buffer);
auto alloc = this->_ZAlloc(length);
if (!alloc)
{
return nullptr;
}
AuMemcpy(alloc, buffer, AuMin(prevLength, length));
this->_Free(buffer);
return alloc;
}
void *InternalHeap::_ZRealloc(void *buffer, Types::size_t length, Types::size_t align)
{
SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return this->_ZRealloc(buffer, length);
}
void *InternalHeap::_FRealloc(void *buffer, Types::size_t length)
{
auto prevLength = GetHeapSize(buffer);
auto alloc = this->_FAlloc(length);
if (!alloc)
{
return nullptr;
}
AuMemcpy(alloc, buffer, AuMin(prevLength, length));
this->_Free(buffer);
return alloc;
}
void *InternalHeap::_FRealloc(void *buffer, Types::size_t length, Types::size_t align)
{
SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
return this->_FRealloc(buffer, length);
}
void InternalHeap::_Free(void *buffer)
{
if (buffer == nullptr)
{
return;
}
o1heapFree(this->heap_, buffer);
DecrementUsers();
}
void InternalHeap::DecrementUsers()
{
if (AuAtomicSub(&this->count_, 1) == 0)
{
TryRelease();
}
}
void InternalHeap::TryRelease()
{
if (!this->isDangling_)
{
return;
}
if (count_ == 0)
{
delete this;
}
}
void InternalHeap::RequestTermination()
{
if (AuAtomicLoad(&this->count_))
{
SysPushErrorMemory("Heap life was less than its allocations, waiting for final free");
SysPushErrorMemory("Reporting using mayday!");
// Write a crash dump for later review, and do not panic.
// We just have a leak with no sign of corruption
Telemetry::Mayday();
this->isDangling_ = true;
}
else
{
delete this;
}
}
void InternalHeap::UpdateStats()
{
auto pDiag = o1heapGetDiagnostics(this->heap_);
this->stats.uBytesLiveCounter = pDiag.allocated;
this->stats.uBytesCapacity = pDiag.capacity;
this->stats.uBytesPeakCounter = pDiag.peak_allocated;
}
void InternalHeap::WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg)
{
o1heapTraverseHeap(this->heap_, fCallback, pSecondArg);
}
AuSPtr<Heap> InternalHeap::GetSelfReference()
{
try
{
return AuSharedFromThis();
}
catch (...)
{
return {};
}
}
Heap *InternalHeap::GetSelfReferenceRaw()
{
return this;
}
AUKN_SYM Heap *AllocHeapNew(AuUInt size)
{
auto heap = _new InternalHeap();
if (!heap)
{
return nullptr;
}
if (!heap->Init(size, nullptr))
{
delete heap;
return nullptr;
}
return heap;
}
AUKN_SYM void AllocHeapRelease(Heap *heap)
{
static_cast<InternalHeap *>(heap)->RequestTermination();
}
AUKN_SYM Heap *RequestHeapOfRegionNew(const MemoryViewWrite &memory)
{
if (!memory)
{
SysPushErrorArg();
return nullptr;
}
auto heap = _new InternalHeap();
if (!heap)
{
return nullptr;
}
if (!heap->Init(memory.length, memory.ptr))
{
delete heap;
return nullptr;
}
return heap;
}
AUKN_SYM void RequestHeapOfRegionRelease(Heap *heap)
{
static_cast<InternalHeap *>(heap)->RequestTermination();
}
}

View File

@ -1,28 +0,0 @@
/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: Heap.hpp
Date: 2021-6-12
Author: Reece
***/
#pragma once
namespace Aurora::Memory
{
struct BaseHeap : Heap
{
void *base_ {};
AuUInt length_ {};
HeapStats stats;
virtual void UpdateStats() = 0;
inline HeapStats &GetStats() override
{
UpdateStats();
return stats;
}
};
AuSPtr<Heap> AllocateDivisionGlobal(Heap *heap, AuUInt32 length, AuUInt32 alignment);
}

View File

@ -6,9 +6,7 @@
Author: Reece Author: Reece
***/ ***/
#include <Source/RuntimeInternal.hpp> #include <Source/RuntimeInternal.hpp>
#include "Memory.hpp" #include "AuMemory.hpp"
#include "SwapLock.hpp"
#include <Source/HWInfo/AuHWInfo.hpp>
namespace Aurora::Memory::Transition namespace Aurora::Memory::Transition
{ {