/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#ifndef SkFixedAlloc_DEFINED
#define SkFixedAlloc_DEFINED

#include "SkRefCnt.h"
#include "SkTFitsIn.h"
#include "SkTypes.h"

#include <cstddef>
#include <cstring>
#include <limits>
#include <new>
#include <type_traits>
#include <utility>
#include <vector>
// SkArenaAlloc allocates objects and destroys the allocated objects when destroyed. It's designed
// to minimize the number of underlying block allocations. SkArenaAlloc allocates first out of an
// (optional) user-provided block of memory, and when that's exhausted it allocates on the heap,
// starting with an allocation of extraSize bytes. If your data (plus a small overhead) fits in
// the user-provided block, SkArenaAlloc never uses the heap, and if it fits in extraSize bytes,
// it'll use the heap only once. If you pass extraSize = 0, it allocates blocks for each call to
// make<T>.
//
// Examples:
//
//   char block[mostCasesSize];
//   SkArenaAlloc arena(block, mostCasesSize, almostAllCasesSize);
//
// If mostCasesSize is too large for the stack, you can use the following pattern.
//
//   std::unique_ptr<char[]> block{new char[mostCasesSize]};
//   SkArenaAlloc arena(block.get(), mostCasesSize, almostAllCasesSize);
//
// If the program only sometimes allocates memory, use the following.
//
//   SkArenaAlloc arena(nullptr, 0, almostAllCasesSize);
//
// The storage does not necessarily need to be on the stack. Embedding the storage in a class also
// works.
//
//   class Foo {
//       char storage[mostCasesSize];
//       SkArenaAlloc arena{storage, mostCasesSize, almostAllCasesSize};
//   };
//
// In addition, the system is optimized to handle POD data including arrays of PODs (where
// POD is really data with no destructors). For POD data it has zero overhead per item, and a
// typical block overhead of 8 bytes. For non-POD objects there is a per item overhead of 4 bytes.
// For arrays of non-POD objects there is a per array overhead of typically 8 bytes. There is an
// additional overhead when switching from POD data to non-POD data of typically 8 bytes.
//
// You can track memory use by adding SkArenaAlloc::kTrack as the last parameter to any constructor.
//
//   char storage[someNumber];
//   SkArenaAlloc alloc{storage, sizeof(storage), 0, SkArenaAlloc::kTrack};
//
// This will print out a line for every destructor or reset call that has the total memory
// allocated, the total slop (the unused portion of a block), and the slop of the last block.
//
// If additional blocks are needed they are increased exponentially. This strategy bounds the
// recursion of the RunDtorsOnBlock to be limited to O(log size-of-memory). Block sizes grow using
// the Fibonacci sequence, which means that for 2^32 memory there are 48 allocations, and for 2^48
// there are 71 allocations.
2017-01-13 16:30:44 +00:00
|
|
|
class SkArenaAlloc {
|
|
|
|
public:
|
2017-04-27 19:22:02 +00:00
|
|
|
enum Tracking {kDontTrack, kTrack};
|
|
|
|
SkArenaAlloc(char* block, size_t size, size_t, Tracking tracking = kDontTrack);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
2017-04-27 19:22:02 +00:00
|
|
|
SkArenaAlloc(size_t extraSize, Tracking tracking = kDontTrack)
|
|
|
|
: SkArenaAlloc(nullptr, 0, extraSize, tracking)
|
2017-01-20 21:58:06 +00:00
|
|
|
{}
|
|
|
|
|
2017-01-13 16:30:44 +00:00
|
|
|
~SkArenaAlloc();
|
|
|
|
|
|
|
|
template <typename T, typename... Args>
|
|
|
|
T* make(Args&&... args) {
|
2017-02-17 15:45:47 +00:00
|
|
|
uint32_t size = SkTo<uint32_t>(sizeof(T));
|
|
|
|
uint32_t alignment = SkTo<uint32_t>(alignof(T));
|
2017-01-13 16:30:44 +00:00
|
|
|
char* objStart;
|
|
|
|
if (skstd::is_trivially_destructible<T>::value) {
|
2017-02-17 15:45:47 +00:00
|
|
|
objStart = this->allocObject(size, alignment);
|
|
|
|
fCursor = objStart + size;
|
2017-01-13 16:30:44 +00:00
|
|
|
} else {
|
2017-02-17 15:45:47 +00:00
|
|
|
objStart = this->allocObjectWithFooter(size + sizeof(Footer), alignment);
|
2017-01-25 22:05:05 +00:00
|
|
|
// Can never be UB because max value is alignof(T).
|
|
|
|
uint32_t padding = SkTo<uint32_t>(objStart - fCursor);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
|
|
|
// Advance to end of object to install footer.
|
2017-02-17 15:45:47 +00:00
|
|
|
fCursor = objStart + size;
|
2017-01-13 16:30:44 +00:00
|
|
|
FooterAction* releaser = [](char* objEnd) {
|
|
|
|
char* objStart = objEnd - (sizeof(T) + sizeof(Footer));
|
|
|
|
((T*)objStart)->~T();
|
|
|
|
return objStart;
|
|
|
|
};
|
|
|
|
this->installFooter(releaser, padding);
|
|
|
|
}
|
|
|
|
|
|
|
|
// This must be last to make objects with nested use of this allocator work.
|
|
|
|
return new(objStart) T(std::forward<Args>(args)...);
|
|
|
|
}
|
|
|
|
|
2017-02-06 18:03:49 +00:00
|
|
|
template <typename T, typename... Args>
|
|
|
|
sk_sp<T> makeSkSp(Args&&... args) {
|
|
|
|
SkASSERT(SkTFitsIn<uint32_t>(sizeof(T)));
|
|
|
|
|
|
|
|
// The arena takes a ref for itself to account for the destructor. The sk_sp count can't
|
|
|
|
// become zero or the sk_sp will try to call free on the pointer.
|
|
|
|
return sk_sp<T>(SkRef(this->make<T>(std::forward<Args>(args)...)));
|
|
|
|
}
|
|
|
|
|
2017-01-13 16:30:44 +00:00
|
|
|
template <typename T>
|
|
|
|
T* makeArrayDefault(size_t count) {
|
2017-02-17 15:45:47 +00:00
|
|
|
uint32_t safeCount = SkTo<uint32_t>(count);
|
|
|
|
T* array = (T*)this->commonArrayAlloc<T>(safeCount);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
|
|
|
// If T is primitive then no initialization takes place.
|
2017-02-17 15:45:47 +00:00
|
|
|
for (size_t i = 0; i < safeCount; i++) {
|
2017-01-13 16:30:44 +00:00
|
|
|
new (&array[i]) T;
|
|
|
|
}
|
|
|
|
return array;
|
|
|
|
}
|
|
|
|
|
|
|
|
template <typename T>
|
|
|
|
T* makeArray(size_t count) {
|
2017-02-17 15:45:47 +00:00
|
|
|
uint32_t safeCount = SkTo<uint32_t>(count);
|
|
|
|
T* array = (T*)this->commonArrayAlloc<T>(safeCount);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
|
|
|
// If T is primitive then the memory is initialized. For example, an array of chars will
|
|
|
|
// be zeroed.
|
2017-02-17 15:45:47 +00:00
|
|
|
for (size_t i = 0; i < safeCount; i++) {
|
2017-01-13 16:30:44 +00:00
|
|
|
new (&array[i]) T();
|
|
|
|
}
|
|
|
|
return array;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Destroy all allocated objects, free any heap allocations.
|
|
|
|
void reset();
|
|
|
|
|
|
|
|
private:
|
2017-01-24 20:59:51 +00:00
|
|
|
using Footer = int64_t;
|
2017-01-13 16:30:44 +00:00
|
|
|
using FooterAction = char* (char*);
|
|
|
|
|
2017-01-19 19:28:49 +00:00
|
|
|
static char* SkipPod(char* footerEnd);
|
|
|
|
static void RunDtorsOnBlock(char* footerEnd);
|
|
|
|
static char* NextBlock(char* footerEnd);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
2017-01-25 22:05:05 +00:00
|
|
|
void installFooter(FooterAction* releaser, uint32_t padding);
|
|
|
|
void installUint32Footer(FooterAction* action, uint32_t value, uint32_t padding);
|
|
|
|
void installPtrFooter(FooterAction* action, char* ptr, uint32_t padding);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
2017-02-17 15:45:47 +00:00
|
|
|
void ensureSpace(uint32_t size, uint32_t alignment);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
2017-05-24 16:19:02 +00:00
|
|
|
char* allocObject(uint32_t size, uint32_t alignment) {
|
|
|
|
uintptr_t mask = alignment - 1;
|
2017-08-15 21:30:58 +00:00
|
|
|
uintptr_t alignedOffset = (~reinterpret_cast<uintptr_t>(fCursor) + 1) & mask;
|
|
|
|
uintptr_t totalSize = size + alignedOffset;
|
|
|
|
if (totalSize < size) {
|
|
|
|
SK_ABORT("The total size of allocation overflowed uintptr_t.");
|
|
|
|
}
|
|
|
|
if (totalSize > static_cast<uintptr_t>(fEnd - fCursor)) {
|
2017-05-24 16:19:02 +00:00
|
|
|
this->ensureSpace(size, alignment);
|
2017-08-15 21:30:58 +00:00
|
|
|
alignedOffset = (~reinterpret_cast<uintptr_t>(fCursor) + 1) & mask;
|
2017-05-24 16:19:02 +00:00
|
|
|
}
|
2017-08-15 21:30:58 +00:00
|
|
|
return fCursor + alignedOffset;
|
2017-05-24 16:19:02 +00:00
|
|
|
}
|
2017-01-13 16:30:44 +00:00
|
|
|
|
2017-02-17 15:45:47 +00:00
|
|
|
char* allocObjectWithFooter(uint32_t sizeIncludingFooter, uint32_t alignment);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
|
|
|
template <typename T>
|
2017-02-17 15:45:47 +00:00
|
|
|
char* commonArrayAlloc(uint32_t count) {
|
2017-01-13 16:30:44 +00:00
|
|
|
char* objStart;
|
2017-07-24 20:11:31 +00:00
|
|
|
SkASSERT_RELEASE(count <= std::numeric_limits<uint32_t>::max() / sizeof(T));
|
2017-02-17 15:45:47 +00:00
|
|
|
uint32_t arraySize = SkTo<uint32_t>(count * sizeof(T));
|
|
|
|
uint32_t alignment = SkTo<uint32_t>(alignof(T));
|
2017-01-13 16:30:44 +00:00
|
|
|
|
|
|
|
if (skstd::is_trivially_destructible<T>::value) {
|
2017-02-17 15:45:47 +00:00
|
|
|
objStart = this->allocObject(arraySize, alignment);
|
2017-01-13 16:30:44 +00:00
|
|
|
fCursor = objStart + arraySize;
|
|
|
|
} else {
|
2017-07-24 20:11:31 +00:00
|
|
|
constexpr uint32_t overhead = sizeof(Footer) + sizeof(uint32_t);
|
|
|
|
SkASSERT_RELEASE(arraySize <= std::numeric_limits<uint32_t>::max() - overhead);
|
|
|
|
uint32_t totalSize = arraySize + overhead;
|
2017-02-17 15:45:47 +00:00
|
|
|
objStart = this->allocObjectWithFooter(totalSize, alignment);
|
2017-01-25 22:05:05 +00:00
|
|
|
|
|
|
|
// Can never be UB because max value is alignof(T).
|
|
|
|
uint32_t padding = SkTo<uint32_t>(objStart - fCursor);
|
2017-01-13 16:30:44 +00:00
|
|
|
|
|
|
|
// Advance to end of array to install footer.?
|
|
|
|
fCursor = objStart + arraySize;
|
2017-01-19 19:28:49 +00:00
|
|
|
this->installUint32Footer(
|
|
|
|
[](char* footerEnd) {
|
|
|
|
char* objEnd = footerEnd - (sizeof(Footer) + sizeof(uint32_t));
|
|
|
|
uint32_t count;
|
|
|
|
memmove(&count, objEnd, sizeof(uint32_t));
|
|
|
|
char* objStart = objEnd - count * sizeof(T);
|
|
|
|
T* array = (T*) objStart;
|
|
|
|
for (uint32_t i = 0; i < count; i++) {
|
|
|
|
array[i].~T();
|
|
|
|
}
|
|
|
|
return objStart;
|
|
|
|
},
|
|
|
|
SkTo<uint32_t>(count),
|
|
|
|
padding);
|
2017-01-13 16:30:44 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return objStart;
|
|
|
|
}
|
|
|
|
|
2017-02-17 15:45:47 +00:00
|
|
|
char* fDtorCursor;
|
|
|
|
char* fCursor;
|
|
|
|
char* fEnd;
|
|
|
|
char* const fFirstBlock;
|
|
|
|
const uint32_t fFirstSize;
|
|
|
|
const uint32_t fExtraSize;
|
2017-04-27 19:22:02 +00:00
|
|
|
|
|
|
|
// Track some useful stats. Track stats if fTotalSlop is >= 0;
|
|
|
|
uint32_t fTotalAlloc { 0};
|
|
|
|
int32_t fTotalSlop {-1};
|
|
|
|
|
2017-03-08 19:17:49 +00:00
|
|
|
// Use the Fibonacci sequence as the growth factor for block size. The size of the block
|
|
|
|
// allocated is fFib0 * fExtraSize. Using 2 ^ n * fExtraSize had too much slop for Android.
|
|
|
|
uint32_t fFib0 {1}, fFib1 {1};
|
2017-01-13 16:30:44 +00:00
|
|
|
};
|
|
|
|
|
2017-05-24 18:53:44 +00:00
|
|
|
// Helper for defining allocators with inline/reserved storage.
|
|
|
|
// For argument declarations, stick to the base type (SkArenaAlloc).
|
|
|
|
template <size_t InlineStorageSize>
|
|
|
|
class SkSTArenaAlloc : public SkArenaAlloc {
|
|
|
|
public:
|
|
|
|
explicit SkSTArenaAlloc(size_t extraSize = InlineStorageSize, Tracking tracking = kDontTrack)
|
|
|
|
: INHERITED(fInlineStorage, InlineStorageSize, extraSize, tracking) {}
|
|
|
|
|
|
|
|
private:
|
|
|
|
char fInlineStorage[InlineStorageSize];
|
|
|
|
|
|
|
|
using INHERITED = SkArenaAlloc;
|
|
|
|
};
|
|
|
|
|
2017-11-18 13:31:59 +00:00
|
|
|
#endif//SkFixedAlloc_DEFINED
|