/***
    Copyright (C) 2021 J Reece Wilson (a/k/a "Reece"). All rights reserved.

    File: Heap.cpp
    Date: 2021-6-12
    Author: Reece
***/
#include <Source/RuntimeInternal.hpp>
#include "Memory.hpp"
#include "Heap.hpp"

#include "mimalloc.h"
#include "o1heap.hpp"

#if defined(AURORA_IS_POSIX_DERIVED)
#include <sys/mman.h>
#endif

namespace Aurora::Memory
{
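    // Rounds a byte count up to the next multiple of the system page size.
    // Worked example (assuming a typical 4 KiB page, so pageMask == 0xFFF):
    //   RoundPageUp(1)    -> 4096
    //   RoundPageUp(4096) -> 4096
    //   RoundPageUp(4097) -> 8192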
    // takes and returns AuUInt so 64-bit lengths aren't truncated before rounding
    static AuUInt RoundPageUp(AuUInt value)
    {
        auto pageMask = AuUInt(HWInfo::GetPageSize()) - 1u;
        return (value + pageMask) & ~(pageMask);
    }
    static void *HeapLargeAllocate(AuUInt length)
    {
        length = RoundPageUp(length);

    #if defined(AURORA_IS_MODERNNT_DERIVED)
        return VirtualAlloc(nullptr, length, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
    #elif defined(AURORA_IS_POSIX_DERIVED)
        // mmap reports failure via MAP_FAILED rather than nullptr; normalize so callers
        // can keep using a simple null check
        auto ptr = mmap(nullptr, length, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        return ptr == MAP_FAILED ? nullptr : ptr;
    #else
        // No direct page allocator on this platform, so fall back to mimalloc.
        // Ideally this would be page aligned; 32-byte alignment from mimalloc's fastest
        // path is adequate here. mimalloc may warn about overly large pass-through
        // allocations, but it has slow paths that handle them.
        return AuMemory::FAlloc<void *>(length, 32);
    #endif
    }
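    // Note: on POSIX the caller must pass the same length to HeapLargeFree that it passed
    // to HeapLargeAllocate, since munmap needs the mapping size; both round it up the same way.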
    static void HeapLargeFree(void *buffer, AuUInt length)
    {
        length = RoundPageUp(length);

    #if defined(AURORA_IS_MODERNNT_DERIVED)
        VirtualFree(buffer, 0, MEM_RELEASE);
    #elif defined(AURORA_IS_POSIX_DERIVED)
        munmap(buffer, length);
    #else
        AuMemory::Free(buffer);
        mi_collect(false);
    #endif
    }
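    // Private heap backed by a single large page-aligned arena and carved up by o1heap.
    // Lifetime protocol: count_ tracks live allocations; AllocHeapRelease ->
    // RequestTermination() destroys the heap immediately when nothing is outstanding,
    // otherwise it marks the heap as dangling and the final _Free() performs the delete.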
    class InternalHeap : public Heap, AuEnableSharedFromThis<InternalHeap>
    {
    public:
        InternalHeap() : mutex_(nullptr), base_(nullptr), heap_(nullptr), count_(0)
        { }

        ~InternalHeap();

        bool Init(AuUInt length);

        typedef struct FragmentHeader
        {
            void *next;
            void *prev;
            size_t size;
            bool used;
        } FragmentHeader;
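        // GetHeapSize() below peeks at that header via ptr[-1], which assumes the header
        // occupies exactly O1HEAP_ALIGNMENT bytes in front of each returned pointer
        // (true for stock o1heap on 32- and 64-bit targets). With stock o1heap the
        // recorded size is the whole fragment (header plus rounding), not the exact
        // length originally requested.
        static_assert(sizeof(FragmentHeader) == O1HEAP_ALIGNMENT,
                      "FragmentHeader must match o1heap's per-allocation overhead");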
        static AuUInt GetHeapSize(const void *ptr)
        {
            return reinterpret_cast<const FragmentHeader *>(ptr)[-1].size;
        }

        Types::size_t GetChunkSize(const void *head) override;

        void *_FAlloc(Types::size_t length) override;
        void *_FAlloc(Types::size_t length, Types::size_t align) override;
        void *_ZAlloc(Types::size_t length) override;
        void *_ZAlloc(Types::size_t length, Types::size_t align) override;
        void *_ZRealloc(void *buffer, Types::size_t length) override;
        void *_ZRealloc(void *buffer, Types::size_t length, Types::size_t align) override;
        void *_FRealloc(void *buffer, Types::size_t length) override;
        void *_FRealloc(void *buffer, Types::size_t length, Types::size_t align) override;
        void _Free(void *buffer) override;
        AuSPtr<Heap> GetSelfReference() override;

        void TryRelease();
        void DecrementUsers();
        void RequestTermination();

    private:
        AuThreadPrimitives::MutexUnique_t mutex_;
        void *base_ {};
        O1HeapInstance *heap_ {};
        int count_ {};
        AuUInt length_ {};
        bool isDangling_ {};
    };
    InternalHeap::~InternalHeap()
    {
        SysAssertDbgExp(count_ == 0);

        if (base_)
        {
            if (heap_)
            {
                o1HeapReleaseCpp(heap_);
            }

            HeapLargeFree(base_, length_);
            base_ = nullptr;
        }

        mutex_.reset();
    }
    bool InternalHeap::Init(AuUInt length)
    {
        SysAssert(!base_, "heap already initialized");
        SysAssert(!mutex_, "heap already initialized");

        SysAssert(length, "invalid heap allocation");
        length_ = length;

        mutex_ = AuThreadPrimitives::MutexUnique();
        if (!mutex_) return false;

        base_ = HeapLargeAllocate(length);
        if (!base_) return false;

        heap_ = o1heapInit(base_, length,
            [this](const O1HeapInstance *const handle) -> void
            {
                SysAssertDbg(this->mutex_, "missing mutex");
                this->mutex_->Lock();
            },
            [this](const O1HeapInstance *const handle) -> void
            {
                SysAssertDbg(this->mutex_, "missing mutex");
                this->mutex_->Unlock();
            }
        );

        // o1heapInit rejects arenas that are too small to host its bookkeeping
        return heap_ != nullptr;
    }
    Types::size_t InternalHeap::GetChunkSize(const void *head)
    {
        return InternalHeap::GetHeapSize(head);
    }
    void *InternalHeap::_FAlloc(Types::size_t length)
    {
        if (!heap_) return nullptr;

        auto ret = o1heapAllocate(heap_, length);
        if (ret) count_++;
        return ret;
    }
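    // o1heap hands out O1HEAP_ALIGNMENT-aligned pointers, so the aligned overloads below
    // simply delegate once they have checked the request does not exceed that natural
    // alignment.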
    void *InternalHeap::_FAlloc(Types::size_t length, Types::size_t align)
    {
        SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
        return _FAlloc(length);
    }
    void *InternalHeap::_ZAlloc(Types::size_t length)
    {
        if (!heap_) return nullptr;

        auto ptr = _FAlloc(length);
        if (!ptr) return nullptr;

        AuMemset(ptr, 0, length);
        return ptr;
    }
    void *InternalHeap::_ZAlloc(Types::size_t length, Types::size_t align)
    {
        SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
        return _ZAlloc(length);
    }
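    // The realloc variants follow the usual contract: allocate a new block, copy
    // min(old, new) bytes, then free the old block; on failure the original block is left
    // untouched and nullptr is returned. A null buffer behaves like a plain allocation.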
    void *InternalHeap::_ZRealloc(void *buffer, Types::size_t length)
    {
        if (!buffer) return _ZAlloc(length);

        auto prevLength = GetHeapSize(buffer);

        auto alloc = _ZAlloc(length);
        if (!alloc) return nullptr;

        AuMemcpy(alloc, buffer, AuMin(prevLength, length));
        _Free(buffer);

        return alloc;
    }
    void *InternalHeap::_ZRealloc(void *buffer, Types::size_t length, Types::size_t align)
    {
        SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
        return _ZRealloc(buffer, length);
    }
    void *InternalHeap::_FRealloc(void *buffer, Types::size_t length)
    {
        if (!buffer) return _FAlloc(length);

        auto prevLength = GetHeapSize(buffer);

        auto alloc = _FAlloc(length);
        if (!alloc) return nullptr;

        AuMemcpy(alloc, buffer, AuMin(prevLength, length));
        _Free(buffer);

        return alloc;
    }
    void *InternalHeap::_FRealloc(void *buffer, Types::size_t length, Types::size_t align)
    {
        SysAssert(align <= O1HEAP_ALIGNMENT, "heap wrapping is unsupported, alignment past the supported 2^x alignment is not possible");
        return _FRealloc(buffer, length);
    }
    void InternalHeap::_Free(void *buffer)
    {
        if (buffer == nullptr) return;

        o1heapFree(heap_, buffer);
        DecrementUsers();
    }
    void InternalHeap::DecrementUsers()
    {
        if (--count_ == 0)
        {
            AU_LOCK_GUARD(this->mutex_);
            TryRelease();
        }
    }
    void InternalHeap::TryRelease()
    {
        if (!isDangling_)
        {
            return;
        }

        if (count_ == 0)
        {
            delete this;
        }
    }
    void InternalHeap::RequestTermination()
    {
        AU_LOCK_GUARD(this->mutex_);

        if (count_)
        {
            AuLogWarn("Heap was released while allocations are still outstanding; deferring destruction until the final free");
            AuLogWarn("Reporting via mayday!");
            Telemetry::Mayday();

            isDangling_ = true;
            TryRelease();
        }
        else
        {
            delete this;
        }
    }
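    // Only yields a valid pointer when this instance is owned by an AuSPtr; for heaps
    // created through AllocHeapNew (raw pointer ownership) the shared-from-this lookup
    // throws and an empty pointer is returned instead.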
    AuSPtr<Heap> InternalHeap::GetSelfReference()
    {
        try
        {
            return AuSharedFromThis();
        }
        catch (...)
        {
            return {};
        }
    }
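    // Illustrative usage of the exported factory pair (sketch only; the 2 MiB figure and
    // variable names are arbitrary, and it assumes the Heap interface methods defined
    // above are reachable through the returned pointer):
    //
    //   auto pHeap = Aurora::Memory::AllocHeapNew(2 * 1024 * 1024);
    //   if (pHeap)
    //   {
    //       auto pBlock = pHeap->_ZAlloc(128);       // 128 zeroed bytes from the private arena
    //       pHeap->_Free(pBlock);
    //       Aurora::Memory::AllocHeapRelease(pHeap); // no allocations remain -> immediate delete
    //   }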
    AUKN_SYM Heap *AllocHeapNew(AuUInt size)
    {
        auto heap = _new InternalHeap();
        if (!heap)
        {
            return nullptr;
        }

        if (!heap->Init(size))
        {
            delete heap;
            return nullptr;
        }

        return heap;
    }
    AUKN_SYM void AllocHeapRelease(Heap *heap)
    {
        static_cast<InternalHeap *>(heap)->RequestTermination();
    }
}