[+] HWInfo::GetPageSize()

[+] Parse::SplitNewlines(..., ..., true) overload, where the return value is the remaining unbuffered (partial) line
[*] Gross hack, to be removed, that drops logs from std string parse exceptions being abused as control flow
[*] Update AuroraForEach and AuroraInterfaces
[*] Experiment with alternative OS address-space reserve + commit mechanics for Memory::Heaps
[*] The global fast rand device should be seeded with at least 64 bits of secure RNG data. Ideally we would max out its entropy with secure bits, but we don't.
Reece Wilson 2021-10-14 12:35:05 +01:00
parent fc16065d5e
commit a626fbea24
8 changed files with 820 additions and 686 deletions


@@ -18,4 +18,6 @@ namespace Aurora::HWInfo
AUKN_SYM AuOptional<RamStat> GetMemStatProcess();
AUKN_SYM AuOptional<RamStat> GetMemStatSystem();
AUKN_SYM AuOptional<RamStat> GetMemStatStartup();
AUKN_SYM AuUInt32 GetPageSize();
}
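For orientation, a consumer-side sketch of the new accessor; the rounding arithmetic mirrors the RoundPageUp helper added further down in this commit, and the variable names are illustrative only:

// Hypothetical usage: size a request up to whole pages (assumes the page size is a power of two).
auto pageSize = Aurora::HWInfo::GetPageSize();
AuUInt32 requested = 5000;
AuUInt32 rounded = (requested + pageSize - 1) & ~(pageSize - 1); // 5000 -> 8192 with 4 KiB pages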


@@ -9,14 +9,14 @@
namespace Aurora::Parse
{
static void SplitNewlines(const AuString &in, std::function<void(const AuString &)> lineCallback)
static AuString SplitNewlines(const AuString &in, std::function<void(const AuString &)> lineCallback, bool returnRemaining)
{
AuMach index = 0, startIdx = 0;
while ((index = in.find("\n", startIdx)) != AuString::npos)
{
auto line = in.substr(startIdx, index - startIdx);
startIdx = index + 1;
startIdx = index + 1;
if (line[line.size() - 1] == '\r')
{
@@ -31,7 +31,20 @@ namespace Aurora::Parse
}
}
lineCallback(in.substr(startIdx));
if (returnRemaining)
{
return in.substr(startIdx);
}
else
{
lineCallback(in.substr(startIdx));
return {};
}
}
static void SplitNewlines(const AuString &in, std::function<void(const AuString &)> lineCallback)
{
SplitNewlines(in, lineCallback, false);
}
static AuList<AuString> SplitString(const AuString &in, AuUInt16 characters)
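A hedged usage sketch of the new overload: feeding chunked input (say, reads off a socket) through the splitter and carrying the trailing partial line over to the next chunk. The buffer, callback, and HandleLine names are made up for illustration.

static AuString gPartialLine;

static void OnChunk(const AuString &chunk)
{
    // Emit every complete line via the callback; keep the unterminated tail for next time.
    gPartialLine = Aurora::Parse::SplitNewlines(gPartialLine + chunk,
        [](const AuString &line)
        {
            HandleLine(line); // hypothetical consumer
        },
        true);
}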

File diff suppressed because it is too large.


@@ -50,7 +50,7 @@
#define AUI_METHOD_PROTOTYPE_ASSIGN(ret, name, params) \
name ## Functional(name ## Functional)
#define AUI_METHOD_FUNCTIONAL_ASSIGN_A(ret, name, params) AUI_METHOD_PROTOTYPE_ASSIGN(ret, name, params)
#define AUI_METHOD_FUNCTIONAL_ASSIGN_A(ret, name, params) : AUI_METHOD_PROTOTYPE_ASSIGN(ret, name, params)
#define AUI_METHOD_FUNCTIONAL_ASSIGN_B(ret, name, params) , AUI_METHOD_PROTOTYPE_ASSIGN(ret, name, params)
#define AUI_METHOD_FUNCTIONAL_FWD(ret, name, params) \
@@ -67,10 +67,10 @@
AU_FOR_EACH_3(AUI_METHOD_FUNCTIONAL_TYPEDEF, __VA_ARGS__) \
inline name ## Functional () {} \
inline ~name ## Functional () {} \
inline name ## Functional (AU_FOR_EACH_FIRST_3(AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_BASE_A, AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_BASE_B, __VA_ARGS__)) : \
inline name ## Functional (AU_FOR_EACH_FIRST_3(AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_BASE_A, AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_BASE_B, __VA_ARGS__)) \
AU_FOR_EACH_FIRST_3(AUI_METHOD_FUNCTIONAL_ASSIGN_A, AUI_METHOD_FUNCTIONAL_ASSIGN_B, __VA_ARGS__) \
{} \
inline name ## Functional (AU_FOR_EACH_FIRST_3(AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_REF_A, AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_REF_B, __VA_ARGS__)) : \
inline name ## Functional (AU_FOR_EACH_FIRST_3(AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_REF_A, AUI_METHOD_FUNCTIONAL_CTOR_PARAMS_REF_B, __VA_ARGS__)) \
AU_FOR_EACH_FIRST_3(AUI_METHOD_FUNCTIONAL_ASSIGN_A, AUI_METHOD_FUNCTIONAL_ASSIGN_B, __VA_ARGS__) \
{} \
AU_FOR_EACH_3(tmpl, __VA_ARGS__) \
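The colon that used to be hard-coded after the constructor's parameter list is now emitted by the first AUI_METHOD_FUNCTIONAL_ASSIGN_A expansion, which keeps the member-initializer list well-formed. A hand-expanded approximation for an interface with two methods, Foo and Bar; the class and typedef names here are hypothetical, not literal preprocessor output:

inline ExampleFunctional(FooFunctional_t FooFunctional, BarFunctional_t BarFunctional)
    : FooFunctional(FooFunctional)   // leading ':' now comes from AUI_METHOD_FUNCTIONAL_ASSIGN_A
    , BarFunctional(BarFunctional)   // subsequent entries come from AUI_METHOD_FUNCTIONAL_ASSIGN_B
{}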


@@ -96,7 +96,7 @@ namespace Aurora::Debug
frameCurrent.label = pSymbol->Name;
}
#if defined(DEBUG)
#if defined(DEBUG) || defined(STAGING)
IMAGEHLP_LINE64 line;
DWORD disp;
@@ -263,6 +263,31 @@ namespace Aurora::Debug
}
}
}
else if (strnicmp(descriptor->name, ".?AV", AuArraySize(".?AV") - 1) == 0)
{
/* Annoying
https://blog.quarkslab.com/visual-c-rtti-inspection.html
This structure is very important to identify an object since it contains its VFT (field pVFTable) and its mangled name. That's why it usually starts with ".?AV", which means "a C++ class". These structures are stored in the section ".data".
We decided to do pattern matching on ".?AV" to get the field name of _TypeInformation and thus retrieves the RTTICompleteObjectLocator.
... we would then have to traverse the hierarchy to determine the root-most signature (std::exception's vtable hash)
TODO(Reece): fix me, this is evil and shouldn't make it into the wild
Fix before 1.0
*/
auto exception = reinterpret_cast<std::exception *>(ExceptionInfo->ExceptionRecord->ExceptionInformation[1]);
if (IsReadable(exception))
{
entry.wincxx.str = exception->what();
}
}
else if (strlen(descriptor->name) == 0)
{
auto exception = reinterpret_cast<std::exception *>(ExceptionInfo->ExceptionRecord->ExceptionInformation[1]);
if (IsReadable(exception))
{
entry.wincxx.str = exception->what();
}
}
}
}
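The pattern being matched above, restated out of context: for an MSVC C++ exception (SEH code 0xE06D7363), ExceptionInformation[1] points at the thrown object, and an RTTI type descriptor name beginning with ".?AV" marks a class type, so the handler gambles that the type derives from std::exception and calls what(). A rough sketch with illustrative variable names (record, message); it is only safe when that assumption actually holds, which is why the comment flags it as a hack to fix before 1.0.

// MSVC decorated RTTI type descriptor names (".?AV" roughly reads as "class ..."):
//   ".?AVexception@std@@"     -> class std::exception
//   ".?AVruntime_error@std@@" -> class std::runtime_error
if (strncmp(descriptor->name, ".?AV", 4) == 0) // 4 == strlen(".?AV")
{
    auto object = reinterpret_cast<std::exception *>(record->ExceptionInformation[1]);
    message = object->what(); // undefined behaviour if the type does not derive from std::exception
}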
@@ -279,6 +304,14 @@ namespace Aurora::Debug
entry.wincxx.str = std::to_string(ExceptionInfo->ExceptionRecord->ExceptionCode);
}
}
else
{
if (entry.wincxx.str.find("invalid sto") != std::string::npos)
{
gDebugLocked = 0;
return EXCEPTION_CONTINUE_SEARCH;
}
}
try
{
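For reference, the "invalid sto" substring the new filter keys on comes from MSVC's STL, whose string-to-number helpers throw std::invalid_argument with messages such as "invalid stoi argument". A minimal standalone reproduction in plain standard C++, independent of this codebase; other standard libraries word the message differently, which is part of why matching on it is a stopgap:

#include <cstdio>
#include <stdexcept>
#include <string>

int main()
{
    try
    {
        (void)std::stoi("not a number"); // no leading digits -> throws
    }
    catch (const std::invalid_argument &e)
    {
        std::printf("%s\n", e.what()); // "invalid stoi argument" on MSVC
    }
    return 0;
}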


@@ -29,6 +29,7 @@
namespace Aurora::HWInfo
{
static AuOptional<RamStat> gMemStartup;
static AuUInt32 gPageSize;
AUKN_SYM AuOptional<RamStat> GetMemStatProcess()
{
@@ -121,8 +122,33 @@ namespace Aurora::HWInfo
return gMemStartup;
}
AUKN_SYM AuUInt32 GetPageSize()
{
return gPageSize;
}
static void SetPageSize()
{
#if defined(AURORA_IS_MODERNNT_DERIVED)
SYSTEM_INFO info;
GetSystemInfo(&info);
gPageSize = info.dwPageSize;
#elif defined(AURORA_IS_POSIX_DERIVED)
gPageSize = getpagesize();
#else
gPageSize = 4096;
#endif
}
void InitRamInfo()
{
gMemStartup = GetMemStatSystem();
SetPageSize();
}
}
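A hedged sketch of a sanity check that could sit alongside SetPageSize(): the mask-based rounding introduced in the heap changes below only works when the page size is a nonzero power of two, so asserting that once at startup is cheap insurance. The function name and assert wording are illustrative, not part of this commit.

static void ValidatePageSize()
{
    auto pageSize = gPageSize;
    // A power of two has exactly one bit set, so x & (x - 1) is zero.
    SysAssert(pageSize && ((pageSize & (pageSize - 1)) == 0), "page size must be a nonzero power of two");
}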


@@ -14,6 +14,40 @@
namespace Aurora::Memory
{
static AuUInt32 RoundPageUp(AuUInt32 value)
{
auto pageMask = HWInfo::GetPageSize() - 1;
return (value + pageMask) & ~(pageMask);
}
static void *HeapLargeAllocate(AuUInt length)
{
length = RoundPageUp(length);
#if defined(AURORA_IS_MODERNNT_DERIVED)
return VirtualAlloc(nullptr, length, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
#elif defined(AURORA_IS_POSIX_DERIVED)
return mmap(0, length, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
#else
// Ideally we should page align.
// mimalloc may have fast paths, with warnings, for overly large passthrough allocations; unsure.
// 32-byte alignment, the fastest way mimalloc can hand us memory, seems adequate.
return Memory::FAlloc<void *>(length, 32);
#endif
}
static void HeapLargeFree(void *buffer, AuUInt length)
{
length = RoundPageUp(length);
#if defined(AURORA_IS_MODERNNT_DERIVED)
VirtualFree(buffer, 0, MEM_RELEASE);
#elif defined(AURORA_IS_POSIX_DERIVED)
munmap(buffer, length);
#else
Memory::Free(buffer);
mi_collect(false);
#endif
}
class InternalHeap : public Heap
{
public:
@@ -57,6 +91,7 @@ namespace Aurora::Memory
void *base_ {};
O1HeapInstance *heap_ {};
int _count {};
AuUInt length_;
bool _isDangling {};
};
@@ -68,9 +103,8 @@ namespace Aurora::Memory
{
o1HeapReleaseCpp(heap_);// ->~O1HeapInstance(); // TODO: make free func
Memory::Free(base_);
HeapLargeFree(base_, length_);
base_ = nullptr;
mi_collect(false);
}
mutex_.reset();
@@ -82,6 +116,7 @@ namespace Aurora::Memory
SysAssert(!mutex_, "heap already initialized");
SysAssert(length, "invalid heap allocation");
length_ = length;
mutex_ = Threading::Primitives::MutexUnique();
if (!mutex_) return false;
@@ -90,16 +125,18 @@ namespace Aurora::Memory
if (!base_) return false;
heap_ = o1heapInit(base_, length,
[this](const O1HeapInstance *const handle) -> void
{
SysAssertDbg(this->mutex_ ? true : false, "missing mutex");
this->mutex_->Lock();
},
[this](const O1HeapInstance *const handle) -> void
{
SysAssertDbg(this->mutex_ ? true : false, "missing mutex");
this->mutex_->Unlock();
});
[this](const O1HeapInstance *const handle) -> void
{
SysAssertDbg(this->mutex_ ? true : false, "missing mutex");
this->mutex_->Lock();
},
[this](const O1HeapInstance *const handle) -> void
{
SysAssertDbg(this->mutex_ ? true : false, "missing mutex");
this->mutex_->Unlock();
}
);
return true;
}
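On the commit note about "alternative OS address space reserve + commit mechanics": the presumed direction is to split address-space reservation from physical commitment, instead of the single MEM_COMMIT | MEM_RESERVE / MAP_ANONYMOUS call above. A hedged sketch of that shape, Windows and POSIX paths only; the helper names are illustrative and not part of this codebase, offsets and lengths are assumed page-aligned (see RoundPageUp), and error handling is elided (note mmap reports failure with MAP_FAILED, not nullptr).

// Reserve a large region up front; commit pages only as the heap actually grows.
static void *HeapReserve(AuUInt length)
{
#if defined(AURORA_IS_MODERNNT_DERIVED)
    return VirtualAlloc(nullptr, length, MEM_RESERVE, PAGE_NOACCESS);
#elif defined(AURORA_IS_POSIX_DERIVED)
    // PROT_NONE keeps the range reserved but inaccessible until committed.
    return mmap(nullptr, length, PROT_NONE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
#endif
}

static bool HeapCommit(void *base, AuUInt offset, AuUInt length)
{
#if defined(AURORA_IS_MODERNNT_DERIVED)
    return VirtualAlloc(static_cast<char *>(base) + offset, length, MEM_COMMIT, PAGE_READWRITE) != nullptr;
#elif defined(AURORA_IS_POSIX_DERIVED)
    return mprotect(static_cast<char *>(base) + offset, length, PROT_READ | PROT_WRITE) == 0;
#endif
}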


@@ -258,7 +258,7 @@ namespace Aurora::RNG
static void InitFastRng()
{
gWellRand = WELL_SeedRand(RngTmpl<false, AuUInt32>());
gWellRand = WELL_SeedRand64(RngTmpl<false, AuUInt64>());
gFastDevice = RandomUnique(RandomDef {false});
}
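On the seeding switch from WELL_SeedRand to WELL_SeedRand64: this lines up with the commit note that the global fast device should receive at least 64 bits of secure entropy. A generic sketch of the pattern in standard C++, since WELL_SeedRand64's real signature isn't visible in this diff; std::random_device stands in for the secure source here, and its quality varies by platform.

#include <cstdint>
#include <random>

// Pull 64 bits from the (hopefully) secure source and seed a fast, non-secure PRNG with them.
static std::mt19937_64 MakeFastRng()
{
    std::random_device secure;
    const std::uint64_t seed = (static_cast<std::uint64_t>(secure()) << 32) | secure();
    return std::mt19937_64 {seed};
}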