Jamie Reece Wilson
298ab88648
[+] AuMemoryView::TryDemoteFromSharedView [+] AuMemory::RequestHeapOfSharedRegion
513 lines
17 KiB
C++
513 lines
17 KiB
C++
/***
|
|
Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.
|
|
|
|
File: Heap.hpp
|
|
Date: 2021-6-9
|
|
Author: Reece
|
|
***/
|
|
#pragma once
|
|
|
|
namespace Aurora::Memory
|
|
{
|
|
struct ProxyHeap;

// Small-object-optimization reserve sizes handed to the AUKN_SHARED_SOO2_NCM
// factory declarations at the bottom of this file (presumably bytes of
// inline storage for the heap implementation — TODO confirm against the
// macro's definition). kHeap2Size covers the larger proxy heaps.
static const AuUInt8 kHeapSize = 128;

static const AuUInt8 kHeap2Size = 255;
|
|
|
|
/**
 * Abstract heap / allocator interface.
 *
 * On top of the protected pure-virtual primitives (_ZAlloc, _FAlloc,
 * _ZRealloc, _FRealloc, _Free), this type layers:
 *   - typed ZAlloc / FAlloc / ZRealloc / FRealloc / Free template helpers;
 *   - NewClass / NewClassUnique / NewClassArray / NewClassArrayUnique
 *     factories that stash a small header immediately before each object so
 *     the static DeleteThat* deleters can recover the owning Heap (and, for
 *     arrays, the element count) without any captured state.
 */
struct Heap
{
    virtual AuSPtr<Heap> AllocateDivision(AuUInt32 heap, AuUInt32 alignment = 32) = 0;

    // Size of the chunk backing pHead (pHead must be a pointer previously
    // returned by this heap)
    virtual Types::size_t GetChunkSize(const void *pHead) = 0;

    virtual HeapStats &GetStats() = 0;

    // Invokes fCallback(entry, pSecondArg) for heap entries; the exact
    // callback contract (meaning of the bool return) is implementation
    // defined — TODO confirm against the concrete heap implementations
    virtual void WalkHeap(bool(*fCallback)(void *, void *), void *pSecondArg) = 0;

    // Potentially slower, zero allocate
    // For non-void T, alignment defaults to alignof(T). No constructor runs.
    template<typename T = void *>
    T ZAlloc(Types::size_t uLength)
    {
        if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
        {
            return reinterpret_cast<T>(_ZAlloc(uLength));
        }
        else
        {
            return reinterpret_cast<T>(_ZAlloc(uLength, alignof(AuRemovePointer_t<T>)));
        }
    }

    // Zero allocate with an explicit alignment
    template<typename T = void *>
    T ZAlloc(Types::size_t uLength, Types::size_t uAlignment)
    {
        return reinterpret_cast<T>(_ZAlloc(uLength, uAlignment));
    }

    // Zero allocate storage for a single T (no constructor runs)
    template<typename T>
    T *ZAlloc()
    {
        return reinterpret_cast<T *>(_ZAlloc(sizeof(T), alignof(T)));
    }

    // Zero allocate storage for uLength Ts (no constructors run)
    template<typename T>
    T *NewArray(Types::size_t uLength)
    {
        return ZAlloc<T *>(uLength * sizeof(T), alignof(T));
    }

    // Zero allocate storage for uLength Ts with an explicit alignment
    template<typename T>
    T *NewArray(Types::size_t uLength, Types::size_t uAlignment)
    {
        return ZAlloc<T *>(uLength * sizeof(T), uAlignment);
    }

    /// Fast, unsafe alloc
    /// Contents are left uninitialized; no constructor runs.
    template<typename T = void *>
    T FAlloc(Types::size_t uLength)
    {
        if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
        {
            return reinterpret_cast<T>(_FAlloc(uLength));
        }
        else
        {
            return reinterpret_cast<T>(_FAlloc(uLength, alignof(AuRemovePointer_t<T>)));
        }
    }

    // Uninitialized alloc with an explicit alignment
    template<typename T = void *>
    T FAlloc(Types::size_t uLength, Types::size_t uAlignment)
    {
        return reinterpret_cast<T>(_FAlloc(uLength, uAlignment));
    }

    // Uninitialized alloc of a single T (no constructor runs)
    template<typename T>
    T *FAlloc()
    {
        return reinterpret_cast<T *>(_FAlloc(sizeof(T), alignof(T)));
    }

    // Reallocs
    // Zeroing realloc; pHead must originate from this heap
    template<typename T>
    T ZRealloc(T pHead, Types::size_t uLength)
    {
        if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
        {
            return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), uLength));
        }
        else
        {
            return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), uLength, alignof(AuRemovePointer_t<T>)));
        }
    }

    // Zeroing realloc with an explicit alignment
    template<typename T>
    T ZRealloc(T pHead, Types::size_t uLength, Types::size_t uAlignment)
    {
        return reinterpret_cast<T>(_ZRealloc(reinterpret_cast<void *>(pHead), uLength, uAlignment));
    }

    // Fast (non-zeroing) realloc
    template<typename T>
    T FRealloc(T pHead, Types::size_t uLength)
    {
        if constexpr (AuIsVoid_v<AuRemovePointer_t<T>>)
        {
            return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), uLength));
        }
        else
        {
            return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), uLength, alignof(AuRemovePointer_t<T>)));
        }
    }

    // Fast (non-zeroing) realloc with an explicit alignment
    template<typename T>
    T FRealloc(T pHead, Types::size_t uLength, Types::size_t uAlignment)
    {
        return reinterpret_cast<T>(_FRealloc(reinterpret_cast<void *>(pHead), uLength, uAlignment));
    }

    // Free
    // Releases a pointer obtained from this heap (no destructor runs)
    template<typename T>
    void Free(T pHead)
    {
        _Free(reinterpret_cast<void *>(pHead));
    }

    protected:

    // Deleter paired with NewClass / NewClassUnique.
    // Allocation layout: [Heap * owner][pad to kAlignment][T object],
    // where kAlignment = AuMax(alignof(T), sizeof(void *)) matches the
    // factory's offset exactly.
    template <typename T>
    static void DeleteThat(T *pThat)
    {
        static const auto kAlignment = AuMax(alignof(T), sizeof(void *));

        if constexpr (AuIsClass_v<T> &&
                      !AuIsTriviallyDestructible_v<T>)
        {
            pThat->~T();
        }

        // pHeap aliases the stashed Heap * header slot; &pHeap is therefore
        // the original allocation base, which is what _Free expects
        auto &pHeap = *(Heap **)(((char *)pThat) - kAlignment);
        pHeap->_Free(&pHeap);
    }

    // Deleter paired with NewClassArray / NewClassArrayUnique.
    // Allocation layout: [Heap *][element count][pad to kAlignment][T array],
    // kAlignment = AuMax(alignof(T), sizeof(void *) * 2) as in the factories.
    template <typename T>
    static void DeleteThatArray(T *pThat)
    {
        static const auto kAlignment = AuMax(alignof(T), sizeof(void *) * 2);

        auto pVoids = (void **)(((char *)pThat) - kAlignment);
        auto pHeap = (Heap *)pVoids[0];
        auto uLength = (AuUInt)pVoids[1];

        if constexpr (AuIsClass_v<T> &&
                      !AuIsTriviallyDestructible_v<T>)
        {
            for (AU_ITERATE_N(i, uLength))
            {
                auto &refElement = pThat[i];
                refElement.~T();
            }
        }

        // pVoids is the original allocation base
        pHeap->_Free(pVoids);
    }

    // Deleter installed by CastPointer<Z, T> after one static cast.
    // NOTE(review): the header offset is recomputed from alignof(Z); this is
    // only correct while AuMax(alignof(Z), sizeof(void *)) equals the offset
    // the allocation was created with (AuMax(alignof(T), sizeof(void *))) and
    // the Z subobject sits at offset 0 of T — TODO confirm both invariants.
    template <typename T, typename Z>
    static void DeleteThatCastedOnce(T *pThat)
    {
        static const auto kAlignment = AuMax(alignof(Z), sizeof(void *));

        auto pBaseClass = AuStaticCast<Z>(pThat);

        if constexpr (AuIsClass_v<Z> &&
                      !AuIsTriviallyDestructible_v<Z>)
        {
            pBaseClass->~Z();
        }

        auto &pHeap = *(Heap **)(((char *)pBaseClass) - kAlignment);
        pHeap->_Free(&pHeap);
    }

    // Intentional no-op deleter: AuUPtr with a function-pointer deleter type
    // still requires a callable deleter even when the pointer is null, so the
    // null-returning paths below install this stub.
    template <typename T>
    static void RetardedSpecWrittenByRetards(T *pThat)
    {

    }

    public:

    /**
     * Allocates and constructs a T on this heap, returned as a shared
     * pointer whose deleter destructs T and releases the storage back to
     * the owning heap (stashed in a header before the object).
     * Returns an empty pointer on allocation failure.
     */
    template <class T, class ...Args>
    AuSPtr<T> NewClass(Args &&...args)
    {
        static const auto kAlignment = AuMax(alignof(T), sizeof(void *));
        AuUInt8 *pPtr;

        // Prefer the canonical self reference so the header names the heap
        // that must eventually free this block
        auto pThat = this->GetSelfReferenceRaw();
        if (!pThat)
        {
            pThat = this;
        }

        if constexpr (AuIsClass_v<T> &&
                      !AuIsTriviallyConstructible_v<T, Args...>)
        {
            // A real constructor will initialize the object: fast alloc
            pPtr = pThat->FAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
            if (pPtr)
            {
                new (pPtr + kAlignment) T(AuForward<Args>(args)...);
            }
        }
        else
        {
            // Trivial construction: zeroed storage stands in for the ctor
            pPtr = pThat->ZAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
        }

        if (!pPtr)
        {
            return {};
        }

        // Header: owning heap, read back by DeleteThat<T>
        *(void **)pPtr = pThat;

        auto pTThat = (T *)(pPtr + kAlignment);
        AUROXTL_COMMODITY_TRY
        {
            return AuSPtr<T>(pTThat, &Heap::DeleteThat<T>);
        }
        AUROXTL_COMMODITY_CATCH
        {
            // Shared-pointer control block allocation failed: unwind the
            // object and storage ourselves
            Heap::DeleteThat<T>(pTThat);
            return {};
        }
    }

    // note: callers can use AuHUPOf_t<Z> pUniquePointer = AuNullHeapPointer<Z>()

    /**
     * As NewClass, but returns a unique pointer. When Z differs from T the
     * result is routed through CastPointer<Z> (one cast level supported).
     * On failure returns a null pointer carrying the no-op deleter.
     */
    template <class T, class Z = T, class ...Args>
    AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)> NewClassUnique(Args &&...args)
    {
        static const auto kAlignment = AuMax(alignof(T), sizeof(void *));
        AuUInt8 *pPtr;

        auto pThat = this->GetSelfReferenceRaw();
        if (!pThat)
        {
            pThat = this;
        }

        if constexpr (AuIsClass_v<T> &&
                      !AuIsTriviallyConstructible_v<T, Args...>)
        {
            pPtr = pThat->FAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
            if (pPtr)
            {
                new (pPtr + kAlignment) T(AuForward<Args>(args)...);
            }
        }
        else
        {
            pPtr = pThat->ZAlloc<AuUInt8 *>(sizeof(T) + kAlignment, kAlignment);
        }

        if (!pPtr)
        {
            return AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)>(nullptr, &Heap::RetardedSpecWrittenByRetards<Z>);
        }

        // Header: owning heap, read back by DeleteThat<T>
        *(void **)pPtr = pThat;

        if constexpr (AuIsSame_v<T, Z>)
        {
            return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>((T *)(pPtr + kAlignment), &Heap::DeleteThat<T>);
        }
        else
        {
            return Heap::CastPointer<Z>(AuMove(AuUPtr<T, decltype(&Heap::DeleteThat<T>)>((T *)(pPtr + kAlignment), &Heap::DeleteThat<T>)));
        }
    }

    /**
     * Allocates and constructs uElements Ts, shared-pointer flavor.
     * fillCtr is forwarded to every element's constructor (non-trivial
     * path) or used via std::fill (trivial path with an argument).
     * Returns empty on zero elements or allocation failure.
     * NOTE(review): sizeof(T) * uElements is not checked for overflow —
     * callers pass trusted sizes; confirm before exposing to untrusted input.
     */
    template <class T, class ...Args>
    AuSPtr<T> NewClassArray(AuUInt uElements, Args &&... fillCtr)
    {
        static const auto kAlignment = AuMax(alignof(T), sizeof(void *) * 2);
        AuUInt8 *pPtr;

        if (!uElements)
        {
            return {};
        }

        auto pThat = this->GetSelfReferenceRaw();
        if (!pThat)
        {
            pThat = this;
        }

        if constexpr (AuIsClass_v<T> &&
                      !AuIsTriviallyConstructible_v<T, Args...>)
        {
            if (bool(pPtr = pThat->FAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
            {
                for (AU_ITERATE_N(i, uElements))
                {
                    new (pPtr + kAlignment + (sizeof(T) * i)) T(AuForward<Args>(fillCtr)...);
                }
            }
        }
        else
        {
            if (bool(pPtr = pThat->ZAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
            {
                if constexpr (sizeof...(Args) != 0)
                {
                    // NOTE(review): static_assert(false) here is eagerly
                    // ill-formed on some compilers even when discarded —
                    // verify if AURT_HEAP_NO_STL builds are expected to work
                    #if defined(AURT_HEAP_NO_STL)
                    static_assert(false);
                    #else
                    auto pElements = (T *)(pPtr + kAlignment);
                    std::fill(pElements, pElements + uElements, AuForward<Args>(fillCtr)...);
                    #endif
                }
            }
        }

        if (!pPtr)
        {
            return {};
        }

        // Two-pointer header read back by DeleteThatArray<T>
        auto pVoids = (void **)pPtr;
        pVoids[0] = pThat;
        pVoids[1] = (void *)uElements;

        auto pTThat = (T *)(pPtr + kAlignment);
        AUROXTL_COMMODITY_TRY
        {
            return AuSPtr<T>(pTThat, &Heap::DeleteThatArray<T>);
        }
        AUROXTL_COMMODITY_CATCH
        {
            // Control block allocation failed: destruct elements + free
            Heap::DeleteThatArray<T>(pTThat);
            return {};
        }
    }

    // note: callers can use AuHUPOf_t<T> pUniquePointer = AuNullHeapPointer<T>()

    /**
     * As NewClassArray, but unique-pointer flavor. The declared deleter
     * type is DeleteThat<T>'s (same function-pointer type); the installed
     * deleter is DeleteThatArray<T>. Null returns carry the no-op deleter.
     */
    template <class T, class ...Args>
    AuUPtr<T, decltype(&Heap::DeleteThat<T>)> NewClassArrayUnique(AuUInt uElements, Args &&... fillCtr)
    {
        static const auto kAlignment = AuMax(alignof(T), sizeof(void *) * 2);
        AuUInt8 *pPtr;

        if (!uElements)
        {
            return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>(nullptr, &Heap::RetardedSpecWrittenByRetards<T>);
        }

        auto pThat = this->GetSelfReferenceRaw();
        if (!pThat)
        {
            pThat = this;
        }

        if constexpr (AuIsClass_v<T> &&
                      !AuIsTriviallyConstructible_v<T, Args...>)
        {
            if (bool(pPtr = pThat->FAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
            {
                for (AU_ITERATE_N(i, uElements))
                {
                    new (pPtr + kAlignment + (sizeof(T) * i)) T(AuForward<Args>(fillCtr)...);
                }
            }
        }
        else
        {
            if (bool(pPtr = pThat->ZAlloc<AuUInt8 *>((sizeof(T) * uElements) + kAlignment, kAlignment)))
            {
                if constexpr (sizeof...(Args) != 0)
                {
                    #if defined(AURT_HEAP_NO_STL)
                    static_assert(false);
                    #else
                    auto pElements = (T *)(pPtr + kAlignment);
                    std::fill(pElements, pElements + uElements, AuForward<Args>(fillCtr)...);
                    #endif
                }
            }
        }

        if (!pPtr)
        {
            return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>(nullptr, &Heap::RetardedSpecWrittenByRetards<T>);
        }

        // Two-pointer header read back by DeleteThatArray<T>
        auto pVoids = (void **)pPtr;
        pVoids[0] = pThat;
        pVoids[1] = (void *)uElements;

        return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>((T *)(pPtr + kAlignment), &Heap::DeleteThatArray<T>);
    }

    // Null unique pointer of the heap-deleter type (no-op deleter installed)
    template <class T>
    cstatic AuUPtr<T, decltype(&Heap::DeleteThat<T>)> NullUniquePointer()
    {
        return AuUPtr<T, decltype(&Heap::DeleteThat<T>)>(nullptr, &Heap::RetardedSpecWrittenByRetards<T>);
    }

    /**
     * Converts a heap-owned unique pointer of T into one of Z via a single
     * static cast, swapping in DeleteThatCastedOnce<Z, T>.
     * Only one cast level is supported: if the deleter is not DeleteThat<T>
     * (already casted, or foreign), a null pointer is returned — and the
     * input, still owned by pInPointer, is released via its own deleter when
     * it goes out of scope.
     */
    template <class Z, class T>
    cstatic AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)> CastPointer(AuUPtr<T, decltype(&Heap::DeleteThat<T>)> &&pInPointer)
    {
        if (!pInPointer)
        {
            return NullUniquePointer<Z>();
        }
        else if (pInPointer.get_deleter() == &Heap::DeleteThat<T>)
        {
            return AuUPtr<Z, decltype(&Heap::DeleteThat<Z>)>(AuStaticCast<Z>(pInPointer.release()), &Heap::DeleteThatCastedOnce<Z, T>);
        }
        else
        {
            return NullUniquePointer<Z>();
        }
    }

    // Heap-owned unique pointer alias
    template <typename T>
    using HUPOf_t = AuUPtr<T, decltype(&Heap::DeleteThat<T>)>;

    protected:
    friend struct ProxyHeap;
    friend struct HeapAccessor;

    virtual AuSPtr<Heap> GetSelfReference() = 0; // may return empty/default. not all heaps are sharable.
    virtual Heap *GetSelfReferenceRaw() = 0;

    // Allocation primitives implemented by concrete heaps:
    // Z* = zeroed memory, F* = fast/uninitialized memory
    virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength) = 0;
    virtual AU_ALLOC void *_ZAlloc(Types::size_t uLength, Types::size_t uAlignment) = 0;
    virtual AU_ALLOC void *_FAlloc(Types::size_t uLength) = 0;
    virtual AU_ALLOC void *_FAlloc(Types::size_t uLength, Types::size_t uAlignment) = 0;
    virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
    virtual AU_ALLOC void *_ZRealloc(void *pBase, Types::size_t uLength) = 0;
    virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength, Types::size_t uAlign) = 0;
    virtual AU_ALLOC void *_FRealloc(void *pBase, Types::size_t uLength) = 0;
    virtual void _Free(void* pBase) = 0;
};
|
|
|
|
struct HeapAccessor
|
|
{
|
|
cstatic AuSPtr<Heap> GetSelfReference(Heap *pHeap)
|
|
{
|
|
return pHeap->GetSelfReference();
|
|
}
|
|
|
|
cstatic Heap *GetSelfReferenceRaw(Heap *pHeap)
|
|
{
|
|
return pHeap->GetSelfReferenceRaw();
|
|
}
|
|
};
|
|
|
|
/**
 * Returns a heap interface backed by the default allocator.
 * Expands to the usual factory set — at minimum
 * DefaultDiscontiguousHeapNew(), used by the helpers below.
 */
AUKN_SHARED_API(DefaultDiscontiguousHeap, Heap);
|
|
|
|
/**
 * Convenience accessor for the default discontiguous heap as a raw,
 * non-owning Heap pointer.
 */
inline Heap *GetDefaultDiscontiguousHeap()
{
    auto pDefaultHeap = DefaultDiscontiguousHeapNew();
    return pDefaultHeap;
}
|
|
|
|
/**
 * Shared-pointer wrapper over the default discontiguous heap.
 * Might not allocate a control block under some STLs — unlike
 * DefaultDiscontiguousHeapSharedShared(), which will generally always
 * allocate one under most STLs.
 */
inline AuSPtr<Heap> GetDefaultDiscontiguousHeapShared()
{
    auto pRawHeap = GetDefaultDiscontiguousHeap();
    return AuUnsafeRaiiToShared(pRawHeap);
}
|
|
|
|
/**
 * Allocates uLength amount of contiguous virtual memory
 * @return a heap backed by uLength bytes of virtual memory
 * @warning the SOO variant cannot guarantee release-on-last-free and will panic if uLength cannot be allocated. Use AllocHeap[Shared/Unique/New](uLength) instead.
 */
AUKN_SHARED_SOO2_NCM(AllocHeap, Heap, kHeapSize, ((AuUInt, uLength)), AuUInt uLength);
|
|
|
|
/**
 * Builds a heap over a caller-provided writable memory region.
 * @warning the SOO variant cannot guarantee release-on-last-free and will panic if an invalid memory handle is provided.
 */
AUKN_SHARED_SOO2_NCM(RequestHeapOfRegion, Heap, kHeapSize, ((const MemoryViewWrite &, memory)), const MemoryViewWrite &memory);
|
|
|
|
/**
 * Builds a heap over a shared writable memory region, keeping the
 * region alive via the shared pointer.
 * NOTE(review): the tuple list names the parameter "memory" while the
 * trailing signature names it "pMemory" — verify the macro tolerates the
 * mismatch (sibling declarations keep the two in agreement).
 * @warning the SOO variant cannot guarantee release-on-last-free and will panic if an invalid memory handle is provided.
 */
AUKN_SHARED_SOO2_NCM(RequestHeapOfSharedRegion, Heap, kHeapSize, ((const AuSPtr<MemoryViewWrite> &, memory)), const AuSPtr<MemoryViewWrite> &pMemory);
|
|
|
|
/**
 * Proxies an existing heap with encapsulated statistics.
 * This is intended for debugging purposes when accurate heap stats of a heap-subset are desired.
 * @warning this heap cannot guarantee release-on-last-free
 */
AUKN_SHARED_SOO2_NCM(HeapProxy, Heap, kHeap2Size, ((const AuSPtr<Heap> &, pHead)), const AuSPtr<Heap> &pHead);
|
|
|
|
/**
 * Proxies an existing heap with encapsulated statistics and leak detector
 * This is intended for debugging purposes when accurate heap stats of a heap-subset are desired.
 * NOTE(review): "&,pHead" lacks the space used by the sibling HeapProxy
 * declaration — cosmetic only, left as-is.
 * @warning this heap cannot guarantee release-on-last-free
 */
AUKN_SHARED_SOO2_NCM(HeapProxyEx, Heap, kHeap2Size, ((const AuSPtr<Heap> &,pHead), (LeakFinderAlloc_f, pfAlloc), (LeakFinderFree_f, pfFree)), const AuSPtr<Heap> &pHead, LeakFinderAlloc_f pfAlloc, LeakFinderFree_f pfFree);
|
|
} |