/***
    Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.

    File: Heap.hpp
    Date: 2021-6-9
    Author: Reece
***/
#pragma once

namespace Aurora::Memory
{
    struct ProxyHeap;

    struct Heap
    {
        virtual AuSPtr<Heap> AllocateDivision(AuUInt32 heap, AuUInt32 alignment = 32) = 0;
        virtual Types::size_t GetChunkSize(const void *pHead) = 0;
        virtual HeapStats &GetStats() = 0;
        template<typename T = void *>
        T ZAlloc(Types::size_t length)
        {
            return reinterpret_cast<T>(_ZAlloc(length));
        }

        template<typename T = void *>
        T ZAlloc(Types::size_t length, Types::size_t align)
        {
            return reinterpret_cast<T>(_ZAlloc(length, align));
        }

        template<typename T>
        T *ZAlloc()
        {
            return reinterpret_cast<T *>(_ZAlloc(sizeof(T)));
        }

        template<typename T>
        T *NewArray(Types::size_t count)
        {
            return ZAlloc<T *>(count * sizeof(T));
        }

        template<typename T>
        T *NewArray(Types::size_t count, Types::size_t align)
        {
            return ZAlloc<T *>(count * sizeof(T), align);
        }

        /// Fast, unsafe alloc
        template<typename T = void *>
        T FAlloc(Types::size_t length)
        {
            return reinterpret_cast<T>(_FAlloc(length));
        }

        template<typename T = void *>
        T FAlloc(Types::size_t length, Types::size_t align)
        {
            return reinterpret_cast<T>(_FAlloc(length, align));
        }

        template<typename T>
        T ZRealloc(T pHead, Types::size_t length)
        {
            return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), length));
        }
        template<typename T>
        T ZRealloc(T pHead, Types::size_t length, Types::size_t align)
        {
            return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), length, align));
        }
        template<typename T>
        T FRealloc(T pHead, Types::size_t length)
        {
            return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), length));
        }
        template<typename T>
        T FRealloc(T pHead, Types::size_t length, Types::size_t align)
        {
            return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), length, align));
        }
        template<typename T>
        void Free(T pHead)
        {
            _Free(reinterpret_cast<void *>(pHead));
        }
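
        // Illustrative usage of the allocator templates above (a sketch; `pHeap` is
        // any Heap implementation and `MyPod` a hypothetical trivially-destructible
        // struct):
        //
        //     auto pBuffer = pHeap->ZAlloc<AuUInt8 *>(1024);      // zeroed bytes
        //     auto pPods   = pHeap->NewArray<MyPod>(16);          // 16 zeroed PODs
        //     pBuffer      = pHeap->ZRealloc(pBuffer, 4096);      // resize
        //     pHeap->Free(pPods);                                 // pair every alloc with a Free
        //     pHeap->Free(pBuffer);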
    protected:

        // NewClass/NewClassUnique allocations carry a [Heap *] prefix so the deleter
        // can recover the owning heap from the object pointer alone.
        template <typename T>
        static void DeleteThat(T *pThat)
        {
            static const auto kAlignment = AuMax(alignof(T), sizeof(void *));

            if constexpr (AuIsClass_v<T>
            #if !defined(AURT_HEAP_NO_STL)
                          && !std::is_trivially_destructible_v<T>
            #endif
                          )
            {
                pThat->~T();
            }

            // &pHeap is the address of the stored Heap * - that is, the base of the
            // original allocation made by NewClass.
            auto &pHeap = *(Heap **)(((char *)pThat) - kAlignment);
            pHeap->_Free(&pHeap);
        }
        // NewClassArray allocations carry a [Heap *, element count] prefix.
        template <typename T>
        static void DeleteThatArray(T *pThat)
        {
            static const auto kAlignment = AuMax(alignof(T), sizeof(void *) * 2);

            auto pVoids = (void **)(((char *)pThat) - kAlignment);
            auto pHeap = (Heap *)pVoids[0];
            auto uCount = (AuUInt)pVoids[1];

            if constexpr (AuIsClass_v<T>
            #if !defined(AURT_HEAP_NO_STL)
                          && !std::is_trivially_destructible_v<T>
            #endif
                          )
            {
                for (AU_ITERATE_N(i, uCount))
                {
                    auto &refElement = pThat[i];
                    refElement.~T();
                }
            }

            pHeap->_Free(pVoids);
        }
        // Intentional no-op deleter: AuUPtr requires a callable deleter even when the
        // managed pointer is null.
        template <typename T>
        static void RetardedSpecWrittenByRetards(T *pThat)
        {
        }
    public:

        template <class T, class ...Args>
        AuSPtr<T> NewClass(Args &&...args)
        {
            static const auto kAlignment = AuMax(alignof(T), sizeof(void *));
            AuUInt8 *pPtr;

            auto pThat = this->GetSelfReferenceRaw();
            if (!pThat)
            {
                pThat = this;
            }

            if constexpr (AuIsClass_v<T>
            #if !defined(AURT_HEAP_NO_STL)
                          && !std::is_trivially_constructible_v<T>
            #endif
                          )
            {
                pPtr = pThat->FAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
                if (pPtr)
                {
                    new (pPtr + kAlignment) T(AuForward<Args &&>(args)...);
                }
            }
            else
            {
                // Trivially constructible: zero-allocated memory is a valid object.
                pPtr = pThat->ZAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
            }

            if (!pPtr)
            {
                return {};
            }

            // Stash the owning heap ahead of the object for DeleteThat.
            *(void **)pPtr = pThat;
            return AuSPtr<T>((T *)(pPtr + kAlignment), &Heap::DeleteThat<T>);
        }
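
        // Illustrative usage (a sketch; `Worker` is a hypothetical class type):
        //
        //     AuSPtr<Worker> pWorker = pHeap->NewClass<Worker>(arg0, arg1);
        //     if (!pWorker) { /* allocation failed */ }
        //
        // The returned shared pointer frees back into this heap via DeleteThat, so
        // the object must not outlive the heap.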
        template <class T, class ...Args>
        AuUPtr<T, decltype(&Heap::DeleteThat<T>)> NewClassUnique(Args &&...args)
        {
            static const auto kAlignment = AuMax(alignof(T), sizeof(void *));
            AuUInt8 *pPtr;

            auto pThat = this->GetSelfReferenceRaw();
            if (!pThat)
            {
                pThat = this;
            }

            if constexpr (AuIsClass_v<T>
            #if !defined(AURT_HEAP_NO_STL)
                          && !std::is_trivially_constructible_v<T>
            #endif
                          )
            {
                pPtr = pThat->FAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
                if (pPtr)
                {
                    new (pPtr + kAlignment) T(AuForward<Args &&>(args)...);
                }
            }
            else
            {
                pPtr = pThat->ZAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
            }

            if (!pPtr)
            {
                return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>(nullptr, &Heap::RetardedSpecWrittenByRetards<T>);
            }

            *(void **)pPtr = pThat;
            return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>((T *)(pPtr + kAlignment), &Heap::DeleteThat<T>);
        }
        template <class T, class ...Args>
        AuSPtr<T> NewClassArray(AuUInt uElements, Args &&... fillCtr)
        {
            static const auto kAlignment = AuMax(alignof(T), sizeof(void *) * 2);
            AuUInt8 *pPtr;

            if (!uElements)
            {
                return {};
            }

            auto pThat = this->GetSelfReferenceRaw();
            if (!pThat)
            {
                pThat = this;
            }

            if constexpr (AuIsClass_v<T>
            #if !defined(AURT_HEAP_NO_STL)
                          && !std::is_trivially_constructible_v<T>
            #endif
                          )
            {
                if (bool(pPtr = pThat->FAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
                {
                    for (AU_ITERATE_N(i, uElements))
                    {
                        new (pPtr + kAlignment + (sizeof(T) * i)) T(AuForward<Args &&>(fillCtr)...);
                    }
                }
            }
            else
            {
                if (bool(pPtr = pThat->ZAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
                {
                    if constexpr (sizeof...(Args) != 0)
                    {
                        #if defined(AURT_HEAP_NO_STL)
                        // Dependent false: only fires if this branch is instantiated.
                        static_assert(!sizeof(T), "array fill requires the STL (AURT_HEAP_NO_STL is defined)");
                        #else
                        // std::fill expects exactly one fill value.
                        auto pElements = (T *)(pPtr + kAlignment);
                        std::fill(pElements, pElements + uElements, AuForward<Args &&>(fillCtr)...);
                        #endif
                    }
                }
            }

            if (!pPtr)
            {
                return {};
            }

            auto pVoids = (void **)pPtr;
            pVoids[0] = pThat;
            pVoids[1] = (void *)uElements;

            return AuSPtr<T>((T *)(pPtr + kAlignment), &Heap::DeleteThatArray<T>);
        }
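
        // Illustrative usage (a sketch; `Vec3` is a hypothetical POD type):
        //
        //     auto pVerts = pHeap->NewClassArray<Vec3>(128);        // zero-initialized
        //     auto pOnes  = pHeap->NewClassArray<AuUInt32>(64, 1u); // filled via std::fill
        //
        // The element count travels in the allocation header so that DeleteThatArray
        // can destruct each element before returning the block to the heap.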

        template <class T, class ...Args>
        AuUPtr<T, decltype(&Heap::DeleteThat<T>)> NewClassArrayUnique(AuUInt uElements, Args &&... fillCtr)
        {
            static const auto kAlignment = AuMax(alignof(T), sizeof(void *) * 2);
            AuUInt8 *pPtr;

            if (!uElements)
            {
                return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>(nullptr, &Heap::RetardedSpecWrittenByRetards<T>);
            }

            auto pThat = this->GetSelfReferenceRaw();
            if (!pThat)
            {
                pThat = this;
            }

            if constexpr (AuIsClass_v<T>
            #if !defined(AURT_HEAP_NO_STL)
                          && !std::is_trivially_constructible_v<T>
            #endif
                          )
            {
                if (bool(pPtr = pThat->FAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
                {
                    for (AU_ITERATE_N(i, uElements))
                    {
                        new (pPtr + kAlignment + (sizeof(T) * i)) T(AuForward<Args &&>(fillCtr)...);
                    }
                }
            }
            else
            {
                if (bool(pPtr = pThat->ZAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
                {
                    if constexpr (sizeof...(Args) != 0)
                    {
                        #if defined(AURT_HEAP_NO_STL)
                        // Dependent false: only fires if this branch is instantiated.
                        static_assert(!sizeof(T), "array fill requires the STL (AURT_HEAP_NO_STL is defined)");
                        #else
                        // std::fill expects exactly one fill value.
                        auto pElements = (T *)(pPtr + kAlignment);
                        std::fill(pElements, pElements + uElements, AuForward<Args &&>(fillCtr)...);
                        #endif
                    }
                }
            }

            if (!pPtr)
            {
                return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>(nullptr, &Heap::RetardedSpecWrittenByRetards<T>);
            }

            auto pVoids = (void **)pPtr;
            pVoids[0] = pThat;
            pVoids[1] = (void *)uElements;

            return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>((T *)(pPtr + kAlignment), &Heap::DeleteThatArray<T>);
        }

        // Wraps an externally allocated pHead in an AuSPtr whose deleter destructs it
        // and frees it back into the given heap. bPinHeap keeps a reference to the
        // heap alive inside the deleter so the heap cannot die before the allocation.
        template<typename T>
        static AuSPtr<T> ToSmartPointer(AuSPtr<Heap> heap,
                                        T *pHead,
                                        bool bPinHeap = true)
        {
            auto handle = bPinHeap ?
                heap :
                AuSPtr<Heap> {};
            auto pHeap = heap.get();
            return AuSPtr<T>(pHead,
                             [handle, pHeap](T *pDeleteMe)
                             {
                                 if constexpr (AuIsClass_v<T>
                                 #if !defined(AURT_HEAP_NO_STL)
                                               && !std::is_trivially_destructible_v<T>
                                 #endif
                                               )
                                 {
                                     pDeleteMe->~T();
                                 }

                                 pHeap->Free(pDeleteMe);
                             });
        }
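
        // Illustrative usage (a sketch; `pHeap` is an AuSPtr<Heap> and `MyPod` a
        // hypothetical type allocated from it):
        //
        //     auto pRaw = pHeap->ZAlloc<MyPod *>(sizeof(MyPod));
        //     AuSPtr<MyPod> pShared = Heap::ToSmartPointer(pHeap, pRaw);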

        template <typename T>
        using HUPOf_t = AuUPtr<T, decltype(&Heap::DeleteThat<T>)>;

    protected:
        friend struct ProxyHeap;

        virtual AuSPtr<Heap> GetSelfReference() = 0; // may return empty/default. not all heaps are sharable.
        virtual Heap *GetSelfReferenceRaw() = 0;

        virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength) = 0;
        virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength, Types::size_t align) = 0;
        virtual AU_ALLOC void *_FAlloc(Types::size_t uLength) = 0;
        virtual AU_ALLOC void *_FAlloc(Types::size_t uLength, Types::size_t align) = 0;
        virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
        virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength) = 0;
        virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
        virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength) = 0;
        virtual void _Free(void *pBase) = 0;
    };

    /**
        Returns a heap interface backed by the default allocator
    */
    AUKN_SHARED_API(GetDefaultDiscontiguousHeap, Heap);
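
    // Illustrative usage (a sketch: the exact factory entry points emitted by
    // AUKN_SHARED_API are an assumption here, e.g. a hypothetical shared variant):
    //
    //     AuSPtr<Aurora::Memory::Heap> pHeap = Aurora::Memory::GetDefaultDiscontiguousHeapShared();
    //     auto pCounter = pHeap->NewClass<AuUInt32>();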

    /**
        Allocates uLength bytes of contiguous virtual memory

        @warning Heaps are guaranteed to outlive their allocations; heaps effectively
                 own a single reference count on themselves. Requesting termination
                 before all of their memory has been freed will result, at worst, in a
                 warning. Expect to leak unless every alloc has been paired with a free.

        I do not expect to implement forced frees, simply because all our primary use
        cases keep track of dtors to forcefully release leaked objects.
        Use RequestHeapOfRegion to be backed by caller-owned memory.

        @return a heap backed by uLength bytes of virtual memory
    */
    AUKN_SHARED_API(AllocHeap, Heap, AuUInt uLength);
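
    // Illustrative usage (a sketch; as above, the AUKN_SHARED_API-generated factory
    // name is an assumption):
    //
    //     auto pHeap = AllocHeapShared(4 * 1024 * 1024);   // 4 MiB arena
    //     if (pHeap)
    //     {
    //         auto pScratch = pHeap->ZAlloc<void *>(512);
    //         // ...
    //         pHeap->Free(pScratch);                        // pair every alloc with a free
    //     }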

    AUKN_SHARED_API(RequestHeapOfRegion, Heap, void *pPtr, AuUInt uLength);

    // AllocHeap but use mimalloc (or the default allocator) instead
    AUKN_SHARED_API(AllocHeapMimalloc, Heap, AuUInt uLength);

    // Proxies an existing heap with encapsulated statistics
    AUKN_SHARED_API(HeapProxy, Heap, const AuSPtr<Heap> &pHead);

    // Proxies an existing heap with encapsulated statistics and leak detector
    AUKN_SHARED_API(HeapProxyEx, Heap, const AuSPtr<Heap> &pHead, LeakFinderAlloc_f pfAlloc, LeakFinderFree_f pfFree);
}