AuroraRuntime/Include/Aurora/Memory/Heap.hpp
Jamie Reece Wilson 8be1afe570 [+] AuMemory::Heap adapters for third party heap allocators
[+] AuMemory::HeapAdapterInterface to describe aforementioned heap allocators of a very limited API
[+] AuMemory::HeapAdapter[Unique,Shared,]
[+] HeapWin32Adapter to convert HANDLE hHeaps of win32s CreateHeap (RtlCreateHeap?) into AuMemory::Heaps
2024-07-19 09:06:56 +01:00

263 lines
9.3 KiB
C++

/***
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
File: Heap.hpp
Date: 2021-6-9
Author: Reece
***/
#pragma once
namespace Aurora::Memory
{
struct ProxyHeap;

// Small-object-optimization buffer sizes (in bytes) handed to the
// AUKN_SHARED_SOO2_NCM factory declarations at the bottom of this header.
// NOTE: 'inline constexpr' (C++17) yields one shared compile-time constant
// across all translation units, instead of a per-TU internal-linkage copy
// as 'static const' would.
inline constexpr AuUInt8 kHeapSize = 128;
inline constexpr AuUInt8 kHeap2Size = 255;

template <class T>
struct CppHeapWrapper;

/**
 * Note: The following public global aliases exist for heap and/or global process heap based allocations:
 *
 * AuHUPOf_t<T> AuNewClassArrayUnique([pHeap, ]uElements, ...)
 * AuSPtr<T> AuNewClassArray([pHeap, ]uElements, ...)
 * AuHUPOf_t<T> AuNewClassUnique([pHeap, ] ...)
 * AuSPtr<T> AuNewClass([pHeap, ] ...)
 * AuHUPOf_t<T> AuNullHeapPointer<T>()
 */
/**
 * Heap: the common interface implemented by every AuMemory allocator declared
 * in this header (the default process-wide heap, virtual-memory backed heaps,
 * statistics/leak-finder proxies, and third-party adapter heaps).
 *
 * The typed template helpers ([Z/F]Alloc, [Z/F]Realloc, NewArray, Free, and
 * the NewClass* factories) are declared here and defined out of line; they
 * route into the protected _ZAlloc/_FAlloc/_ZRealloc/_FRealloc/_Free virtuals
 * at the bottom of the struct. Z* entry points zero memory; F* entry points
 * are fast/non-zeroing.
 *
 * NOTE(review): declaration order of the virtuals defines the vtable layout;
 * do not reorder them.
 */
struct Heap
{
/// Carves a sub-heap out of this heap.
/// NOTE(review): 'heap' presumably specifies the division's size in bytes and
/// 'alignment' its base alignment — confirm at the definition site.
virtual AuSPtr<Heap> AllocateDivision(AuUInt32 heap, AuUInt32 alignment = 32) = 0;
/// Returns the size of the chunk backing pHead (an allocation owned by this heap).
virtual Types::size_t GetChunkSize(const void *pHead) = 0;
/// Returns this heap's statistics block.
virtual HeapStats &GetStats() = 0;
/// Walks the heap, invoking fCallback(entry, pSecondArg) per entry.
/// NOTE(review): the callback's bool return presumably means "continue walking" — confirm.
virtual void WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg) = 0;
/// Potentially slower, zero allocate
template<typename T = void *>
T ZAlloc(Types::size_t uLength);
/// POD zero allocation (explicit alignment)
template<typename T = void *>
T ZAlloc(Types::size_t uLength, Types::size_t uAlignment);
/// POD zero allocation of a single T
template<typename T>
T *ZAlloc();
/// POD array, zero allocation
template<typename T>
T *NewArray(Types::size_t uLength);
/// POD array, zero allocation (explicit alignment)
template<typename T>
T *NewArray(Types::size_t uLength, Types::size_t uAlignment);
/// Fast, POD, non-zeroing allocation
template<typename T = void *>
T FAlloc(Types::size_t uLength);
/// Fast, POD, non-zeroing allocation (explicit alignment)
template<typename T = void *>
T FAlloc(Types::size_t uLength, Types::size_t uAlignment);
/// Fast, POD, non-zeroing allocation of a single T
template<typename T>
T *FAlloc();
// Reallocs
/// POD, zero-based expansion or reallocation
template<typename T>
T ZRealloc(T pHead, Types::size_t uLength);
/// POD, zero-based expansion or reallocation (explicit alignment)
template<typename T>
T ZRealloc(T pHead, Types::size_t uLength, Types::size_t uAlignment);
/// POD, expansion or reallocation
template<typename T>
T FRealloc(T pHead, Types::size_t uLength);
/// POD, expansion or reallocation (explicit alignment)
template<typename T>
T FRealloc(T pHead, Types::size_t uLength, Types::size_t uAlignment);
/// Releases an allocation previously obtained from this heap.
template<typename T>
void Free(T pHead);
protected:
// Deleter entry points bound into the smart pointers produced by the
// NewClass* factories below; definitions are out of line and not visible
// in this header chunk.
template <typename T>
static void DeleteThat(T *pThat);
template <typename T>
static void DeleteThatArray(T *pThat);
template <typename T>
static void DeleteThatArray2(T *pThat);
template <typename T, typename Z>
static void DeleteThatCastedOnce(T *pThat);
// NOTE(review): presumably a workaround for a C++ specification quirk around
// smart-pointer deleters — confirm intent at the definition site.
template <typename T>
static void RetardedSpecWrittenByRetards(T *pThat);
public:
/// Allocates and constructs a T on this heap, returned as a shared pointer.
template <class T, class ...Args>
AuSPtr<T> NewClass(Args &&...args);
// note: callers can use AuHUPOf_t<Z> pUniquePointer = AuNullHeapPointer<Z>()
template <class T, class Z = T, class ...Args>
AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)> NewClassUnique(Args &&...args);
/// Allocates and constructs uElements instances of T, forwarding fillCtr to each.
template <class T, class ...Args>
AuSPtr<T> NewClassArray(AuUInt uElements, Args &&... fillCtr);
/// As NewClassArray, with an explicit base alignment.
template <class T, class ...Args>
AuSPtr<T> NewClassArray2(AuUInt uElements, AuUInt uAlignment, Args &&... fillCtr);
// note: callers can use AuHUPOf_t<T> pUniquePointer = AuNullHeapPointer<T>()
template <class T, class ...Args>
AuUPtr<T, decltype(&Heap::DeleteThat<T>)> NewClassArrayUnique(AuUInt uElements, Args &&... fillCtr);
template <class T, class ...Args>
AuUPtr<T, decltype(&Heap::DeleteThat<T>)> NewClassArray2Unique(AuUInt uElements, AuUInt uAlignment, Args &&... fillCtr);
/// Returns an empty unique pointer of the heap-aware deleter type.
/// ('cstatic' is a project macro — presumably expands to 'static'; confirm.)
template <class T>
cstatic AuUPtr<T, decltype(&Heap::DeleteThat<T>)> NullUniquePointer();
/// Transfers ownership from a unique pointer of T into one of Z.
template <class Z, class T>
cstatic AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)> CastPointer(AuUPtr<T, decltype(&Heap::DeleteThat<T>)> &&pInPointer);
/// Heap-aware unique pointer alias (the global AuHUPOf_t mentioned above).
template <typename T>
using HUPOf_t = AuUPtr<T, decltype(&Heap::DeleteThat<T>)>;
protected:
friend struct ProxyHeap;
friend struct HeapAccessor;
virtual AuSPtr<Heap> GetSelfReference() = 0; // may return empty/default. not all heaps are sharable.
virtual Heap *GetSelfReferenceRaw() = 0;
// Raw allocation virtuals backing the typed helpers above: _Z* zero memory,
// _F* do not; overloads taking uAlignment honor an explicit alignment.
virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength) = 0;
virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength, Types::size_t uAlignment) = 0;
virtual AU_ALLOC void *_FAlloc(Types::size_t uLength) = 0;
virtual AU_ALLOC void *_FAlloc(Types::size_t uLength, Types::size_t uAlignment) = 0;
virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength) = 0;
virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength) = 0;
virtual void _Free(void* pBase) = 0;
};
/**
 * HeapAccessor exposes Heap's protected self-reference accessors to external
 * code; it is declared a friend of Heap above.
 */
struct HeapAccessor
{
/// Fetches the heap's shared self-reference (may be empty — not all heaps are sharable).
cstatic AuSPtr<Heap> GetSelfReference(Heap *pThat)
{
auto &refHeap = *pThat;
return refHeap.GetSelfReference();
}
/// Fetches the heap's raw self-reference.
cstatic Heap *GetSelfReferenceRaw(Heap *pThat)
{
auto &refHeap = *pThat;
return refHeap.GetSelfReferenceRaw();
}
};
/**
 * Opaque per-adapter state passed back to every HeapAdapterInterface callback.
 */
struct HeapAdapterHandle
{
// Two pointers of scratch space; meaning is defined by the adapter implementation.
void *pReserved[2];
};
/**
 * Describes a third-party heap allocator of a very limited API so it can be
 * wrapped into an AuMemory::Heap via the HeapAdapter factories below.
 * Member order is part of the struct layout; do not reorder.
 */
struct HeapAdapterInterface
{
// Adapter state forwarded as the first argument of every callback.
HeapAdapterHandle handle;
// Required: allocate uLength bytes (uAlignment honored only if
// bHasAlignmentAwareness is set — see below).
void *(* fAllocate)(HeapAdapterHandle *pHandle,
AuUInt uLength,
AuUInt uAlignment) = nullptr;
// Required: free a pointer previously returned by fAllocate.
void (* fFree)(HeapAdapterHandle *pHandle,
void *pPointer) = nullptr;
// Optional: return the block size of an allocation made by fAllocate.
AuUInt (* fGetBlockSize)(HeapAdapterHandle *pHandle,
void *pPointer) = nullptr;
// Optional: tear the underlying allocator down when the adapter heap is destroyed.
void (* fHeapDestroy)(HeapAdapterHandle *pHandle) = nullptr;
// Set when fAllocate honors its uAlignment argument; otherwise the wrapper
// presumably has to over-allocate to satisfy alignment — confirm in the
// HeapAdapter implementation.
bool bHasAlignmentAwareness {};
};
/**
 * Returns a heap interface backed by the default allocator.
 * The macro declares the DefaultDiscontiguousHeap factory family (at least
 * DefaultDiscontiguousHeapNew, used by the helper below, plus the Shared
 * variant referenced there).
 */
AUKN_SHARED_API(DefaultDiscontiguousHeap, Heap);
/**
 * @brief Fetches the default discontiguous heap as a raw, non-owning pointer.
 */
inline Heap *GetDefaultDiscontiguousHeap()
{
auto *pDefaultHeap = DefaultDiscontiguousHeapNew();
return pDefaultHeap;
}
/**
 * @brief Fetches the default discontiguous heap as an AuSPtr<Heap>.
 * Wraps the raw pointer without taking ownership. Under some STLs this might
 * not allocate a control block, unlike the macro-generated
 * DefaultDiscontiguousHeapShared() factory, which will generally always
 * allocate one under most STLs.
 */
inline AuSPtr<Heap> GetDefaultDiscontiguousHeapShared()
{
Heap *pDefaultHeap = GetDefaultDiscontiguousHeap();
return AuUnsafeRaiiToShared(pDefaultHeap);
}
/**
 * Allocates uLength amount of contiguous virtual memory
 * @return a heap backed by uLength bytes of virtual memory
 * @warning the SOO variant cannot guarantee release-on-last-free and will panic if uLength cannot be allocated. Use AllocHeap[Shared/Unique/New](uLength) instead.
 */
AUKN_SHARED_SOO2_NCM(AllocHeap, Heap, kHeapSize, ((AuUInt, uLength)),
AuUInt uLength);
/**
 * Wraps a caller-provided writable memory region as a heap; the caller keeps
 * the region alive for the heap's lifetime.
 * @warning the SOO variant cannot guarantee release-on-last-free and will panic if an invalid memory handle is provided.
 */
AUKN_SHARED_SOO2_NCM(RequestHeapOfRegion, Heap, kHeapSize, ((const MemoryViewWrite &, memory)),
const MemoryViewWrite &memory);
/**
 * As RequestHeapOfRegion, but takes the region by shared pointer — presumably
 * keeping the backing view alive via that shared ownership; confirm in the
 * implementation.
 * @warning the SOO variant cannot guarantee release-on-last-free and will panic if an invalid memory handle is provided.
 */
AUKN_SHARED_SOO2_NCM(RequestHeapOfSharedRegion, Heap, kHeapSize, ((const AuSPtr<MemoryViewWrite> &, memory)),
const AuSPtr<MemoryViewWrite> &pMemory);
/**
 * Proxies an existing heap with encapsulated statistics.
 * This is intended for debugging purposes when accurate heap stats of a heap-subset are desired.
 * @warning this heap cannot guarantee release-on-last-free
 */
AUKN_SHARED_SOO2_NCM(HeapProxy, Heap, kHeap2Size, ((const AuSPtr<Heap> &, pHead)),
const AuSPtr<Heap> &pHead);
/**
 * Proxies an existing heap with encapsulated statistics and leak detector.
 * This is intended for debugging purposes when accurate heap stats of a heap-subset are desired.
 * @warning this heap cannot guarantee release-on-last-free
 */
AUKN_SHARED_SOO2_NCM(HeapProxyEx, Heap, kHeap2Size, ((const AuSPtr<Heap> &, pHead), (LeakFinderAlloc_f, pfAlloc), (LeakFinderFree_f, pfFree)),
const AuSPtr<Heap> &pHead,
LeakFinderAlloc_f pfAlloc,
LeakFinderFree_f pfFree);
/**
 * Proxies an existing heap allocator library of a malloc and free; bonus points
 * for aligned malloc, get allocation size, and destroy.
 * See HeapAdapterInterface for the callback contract.
 */
AUKN_SHARED_SOO2_NCM(HeapAdapter, Heap, kHeap2Size, ((const HeapAdapterInterface &, adapterInterface)),
const HeapAdapterInterface &adapterInterface);
#if defined(AURORA_IS_MODERNNT_DERIVED)
// Adapts a Win32 heap HANDLE (e.g. from HeapCreate/GetProcessHeap) into an AuMemory::Heap.
AUKN_SHARED_SOO2_NCM(HeapWin32Adapter, Heap, kHeap2Size, ((void *, hHeap)), void *hHeap);
#endif
}