/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkTemplates_DEFINED
#define SkTemplates_DEFINED

#include "SkMath.h"
#include "SkMalloc.h"
#include "SkTLogic.h"
#include "SkTypes.h"

#include <array>
#include <limits.h>
#include <memory>
#include <new>
#include <utility>

/** \file SkTemplates.h

    This file contains light-weight template classes for type-safe and exception-safe
    resource management.
*/

/**
 *  Marks a local variable as known to be unused (to avoid warnings).
 *  Note that this does *not* prevent the local variable from being optimized away.
 */
template<typename T> inline void sk_ignore_unused_variable(const T&) { }

/**
 *  Returns a pointer to a D which comes immediately after S[count].
 */
template <typename D, typename S> static D* SkTAfter(S* ptr, size_t count = 1) {
    return reinterpret_cast<D*>(ptr + count);
}

/**
 *  Returns a pointer to a D which comes byteOffset bytes after S.
 */
template <typename D, typename S> static D* SkTAddOffset(S* ptr, size_t byteOffset) {
    // The intermediate char* has the same cv-ness as D as this produces better error messages.
    // This relies on the fact that reinterpret_cast can add constness, but cannot remove it.
    return reinterpret_cast<D*>(reinterpret_cast<sknonstd::same_cv_t<char, D>*>(ptr) + byteOffset);
}
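
// A minimal usage sketch (the Header/payload layout here is hypothetical, not part of
// this header): with a header placed immediately before its payload in one allocation,
//
//     struct Header { int fSize; };
//     Header*  header  = reinterpret_cast<Header*>(block);
//     uint8_t* payload = SkTAfter<uint8_t>(header);                       // just past header[1]
//     uint8_t* same    = SkTAddOffset<uint8_t>(header, sizeof(Header));   // equivalent address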

template <typename R, typename T, R (*P)(T*)> struct SkFunctionWrapper {
    R operator()(T* t) { return P(t); }
};

/** \class SkAutoTCallVProc

    Call a function when this goes out of scope. The template takes two
    parameters: the object type, and the function to be called in the destructor.
    If release() is called, the object reference is set to null. If the object
    reference is null when the destructor is called, we do not call the
    function.
*/
template <typename T, void (*P)(T*)> class SkAutoTCallVProc
    : public std::unique_ptr<T, SkFunctionWrapper<void, T, P>> {
public:
    SkAutoTCallVProc(T* obj): std::unique_ptr<T, SkFunctionWrapper<void, T, P>>(obj) {}

    operator T*() const { return this->get(); }
};
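
// A minimal usage sketch, assuming a C-style resource and a matching void-returning
// cleanup function (FILE/fclose are used purely as an illustration):
//
//     static void sk_fclose_wrapper(FILE* f) { fclose(f); }
//     ...
//     SkAutoTCallVProc<FILE, sk_fclose_wrapper> file(fopen(path, "rb"));
//     if (!file) { return false; }       // operator T*() allows pointer-style checks
//     fread(buffer, 1, size, file);      // implicit conversion to FILE*
//     // sk_fclose_wrapper(file) runs automatically when 'file' goes out of scope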

/** \class SkAutoTCallIProc

    Call a function when this goes out of scope. The template takes two
    parameters: the object type, and the function to be called in the destructor.
    If release() is called, the object reference is set to null. If the object
    reference is null when the destructor is called, we do not call the
    function.
*/
template <typename T, int (*P)(T*)> class SkAutoTCallIProc
    : public std::unique_ptr<T, SkFunctionWrapper<int, T, P>> {
public:
    SkAutoTCallIProc(T* obj): std::unique_ptr<T, SkFunctionWrapper<int, T, P>>(obj) {}

    operator T*() const { return this->get(); }
};

/** Allocate an array of T elements, and free the array in the destructor
 */
template <typename T> class SkAutoTArray {
public:
    SkAutoTArray() {}
    /** Allocate count number of T elements
     */
    explicit SkAutoTArray(int count) {
        SkASSERT(count >= 0);
        if (count) {
            fArray.reset(new T[count]);
        }
        SkDEBUGCODE(fCount = count;)
    }

    SkAutoTArray(SkAutoTArray&& other) : fArray(std::move(other.fArray)) {
        SkDEBUGCODE(fCount = other.fCount; other.fCount = 0;)
    }
    SkAutoTArray& operator=(SkAutoTArray&& other) {
        if (this != &other) {
            fArray = std::move(other.fArray);
            SkDEBUGCODE(fCount = other.fCount; other.fCount = 0;)
        }
        return *this;
    }

    /** Reallocates given a new count. Reallocation occurs even if new count equals old count.
     */
    void reset(int count) { *this = SkAutoTArray(count); }

    /** Return the array of T elements. Will be NULL if count == 0
     */
    T* get() const { return fArray.get(); }

    /** Return the nth element in the array
     */
    T& operator[](int index) const {
        SkASSERT((unsigned)index < (unsigned)fCount);
        return fArray[index];
    }

private:
    std::unique_ptr<T[]> fArray;
    SkDEBUGCODE(int fCount = 0;)
};
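
// A minimal usage sketch (the element type and consumer are illustrative):
//
//     SkAutoTArray<SkPoint> pts(n);                     // default-constructs n SkPoints
//     for (int i = 0; i < n; ++i) {
//         pts[i] = SkPoint::Make(SkIntToScalar(i), 0);
//     }
//     draw_points(pts.get(), n);                        // hypothetical consumer
//     // the array is deleted when 'pts' goes out of scope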

/** Wraps SkAutoTArray, with room for kCountRequested elements preallocated.
 */
template <int kCountRequested, typename T> class SkAutoSTArray : SkNoncopyable {
public:
    /** Initialize with no objects */
    SkAutoSTArray() {
        fArray = nullptr;
        fCount = 0;
    }

    /** Allocate count number of T elements
     */
    SkAutoSTArray(int count) {
        fArray = nullptr;
        fCount = 0;
        this->reset(count);
    }

    ~SkAutoSTArray() {
        this->reset(0);
    }

    /** Destroys previous objects in the array and default constructs count number of objects */
    void reset(int count) {
        T* start = fArray;
        T* iter = start + fCount;
        while (iter > start) {
            (--iter)->~T();
        }

        SkASSERT(count >= 0);
        if (fCount != count) {
            if (fCount > kCount) {
                // 'fArray' was allocated last time so free it now
                SkASSERT((T*) fStorage != fArray);
                sk_free(fArray);
            }

            if (count > kCount) {
                fArray = (T*) sk_malloc_throw(count, sizeof(T));
            } else if (count > 0) {
                fArray = (T*) fStorage;
            } else {
                fArray = nullptr;
            }

            fCount = count;
        }

        iter = fArray;
        T* stop = fArray + count;
        while (iter < stop) {
            new (iter++) T;
        }
    }

    /** Return the number of T elements in the array
     */
    int count() const { return fCount; }

    /** Return the array of T elements. Will be NULL if count == 0
     */
    T* get() const { return fArray; }

    T* begin() { return fArray; }

    const T* begin() const { return fArray; }

    T* end() { return fArray + fCount; }

    const T* end() const { return fArray + fCount; }

    /** Return the nth element in the array
     */
    T& operator[](int index) const {
        SkASSERT(index < fCount);
        return fArray[index];
    }

private:
#if defined(SK_BUILD_FOR_GOOGLE3)
    // Stack frame size is limited for SK_BUILD_FOR_GOOGLE3. 4k is less than the actual max, but some functions
    // have multiple large stack allocations.
    static const int kMaxBytes = 4 * 1024;
    static const int kCount = kCountRequested * sizeof(T) > kMaxBytes
        ? kMaxBytes / sizeof(T)
        : kCountRequested;
#else
    static const int kCount = kCountRequested;
#endif

    int fCount;
    T* fArray;
    // since we come right after fArray, fStorage should be properly aligned
    char fStorage[kCount * sizeof(T)];
};
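
// A minimal usage sketch: the first 16 elements are stored inline, larger counts fall
// back to a heap allocation (the count and element type are illustrative):
//
//     SkAutoSTArray<16, SkRect> rects(runCount);
//     for (int i = 0; i < rects.count(); ++i) {
//         rects[i] = SkRect::MakeEmpty();
//     }
//     // every SkRect is destroyed, and any heap block freed, when 'rects' goes out of scope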

/** Manages an array of T elements, freeing the array in the destructor.
 *  Does NOT call any constructors/destructors on T (T must be POD).
 */
template <typename T> class SkAutoTMalloc {
public:
    /** Takes ownership of the ptr. The ptr must be a value which can be passed to sk_free. */
    explicit SkAutoTMalloc(T* ptr = nullptr) : fPtr(ptr) {}

    /** Allocates space for 'count' Ts. */
    explicit SkAutoTMalloc(size_t count)
        : fPtr(count ? (T*)sk_malloc_throw(count, sizeof(T)) : nullptr) {}

    SkAutoTMalloc(SkAutoTMalloc&&) = default;
    SkAutoTMalloc& operator=(SkAutoTMalloc&&) = default;

    /** Resize the memory area pointed to by the current ptr preserving contents. */
    void realloc(size_t count) {
        fPtr.reset(count ? (T*)sk_realloc_throw(fPtr.release(), count * sizeof(T)) : nullptr);
    }

    /** Resize the memory area pointed to by the current ptr without preserving contents. */
    T* reset(size_t count = 0) {
        fPtr.reset(count ? (T*)sk_malloc_throw(count, sizeof(T)) : nullptr);
        return this->get();
    }

    T* get() const { return fPtr.get(); }

    operator T*() { return fPtr.get(); }

    operator const T*() const { return fPtr.get(); }

    T& operator[](int index) { return fPtr.get()[index]; }

    const T& operator[](int index) const { return fPtr.get()[index]; }

    /**
     *  Transfer ownership of the ptr to the caller, setting the internal
     *  pointer to NULL. Note that this differs from get(), which also returns
     *  the pointer, but it does not transfer ownership.
     */
    T* release() { return fPtr.release(); }

private:
    std::unique_ptr<T, SkFunctionWrapper<void, void, sk_free>> fPtr;
};
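
// A minimal usage sketch; the memory is raw, so this is only suitable for trivially
// constructible/destructible element types (the pixel buffer is illustrative):
//
//     SkAutoTMalloc<uint32_t> pixels(width * height);    // uninitialized storage
//     memset(pixels.get(), 0, width * height * sizeof(uint32_t));
//     pixels.realloc(width * (height + 1));              // grows, preserving contents
//     uint32_t* raw = pixels.release();                  // caller owns; free with sk_free(raw)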

template <size_t kCountRequested, typename T> class SkAutoSTMalloc : SkNoncopyable {
public:
    SkAutoSTMalloc() : fPtr(fTStorage) {}

    SkAutoSTMalloc(size_t count) {
        if (count > kCount) {
            fPtr = (T*)sk_malloc_throw(count, sizeof(T));
        } else if (count) {
            fPtr = fTStorage;
        } else {
            fPtr = nullptr;
        }
    }

    ~SkAutoSTMalloc() {
        if (fPtr != fTStorage) {
            sk_free(fPtr);
        }
    }

    // doesn't preserve contents
    T* reset(size_t count) {
        if (fPtr != fTStorage) {
            sk_free(fPtr);
        }
        if (count > kCount) {
            fPtr = (T*)sk_malloc_throw(count, sizeof(T));
        } else if (count) {
            fPtr = fTStorage;
        } else {
            fPtr = nullptr;
        }
        return fPtr;
    }

    T* get() const { return fPtr; }

    operator T*() {
        return fPtr;
    }

    operator const T*() const {
        return fPtr;
    }

    T& operator[](int index) {
        return fPtr[index];
    }

    const T& operator[](int index) const {
        return fPtr[index];
    }

    // Reallocs the array; can be used to shrink the allocation. Makes no attempt to be intelligent.
    void realloc(size_t count) {
        if (count > kCount) {
            if (fPtr == fTStorage) {
                fPtr = (T*)sk_malloc_throw(count, sizeof(T));
                memcpy(fPtr, fTStorage, kCount * sizeof(T));
            } else {
                fPtr = (T*)sk_realloc_throw(fPtr, count, sizeof(T));
            }
        } else if (count) {
            if (fPtr != fTStorage) {
                fPtr = (T*)sk_realloc_throw(fPtr, count, sizeof(T));
            }
        } else {
            this->reset(0);
        }
    }

private:
    // Since we use uint32_t storage, we might be able to get more elements for free.
    static const size_t kCountWithPadding = SkAlign4(kCountRequested*sizeof(T)) / sizeof(T);
#if defined(SK_BUILD_FOR_GOOGLE3)
    // Stack frame size is limited for SK_BUILD_FOR_GOOGLE3. 4k is less than the actual max, but some functions
    // have multiple large stack allocations.
    static const size_t kMaxBytes = 4 * 1024;
    static const size_t kCount = kCountRequested * sizeof(T) > kMaxBytes
        ? kMaxBytes / sizeof(T)
        : kCountWithPadding;
#else
    static const size_t kCount = kCountWithPadding;
#endif

    T* fPtr;
    union {
        uint32_t fStorage32[SkAlign4(kCount*sizeof(T)) >> 2];
        T        fTStorage[1];  // do NOT want to invoke T::T()
    };
};
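
// A minimal usage sketch: like SkAutoTMalloc, but counts up to the (padded) reserve are
// served from the embedded storage instead of the heap (sizes and helpers illustrative):
//
//     SkAutoSTMalloc<64, uint16_t> glyphs(glyphCount);   // heap only if glyphCount > reserve
//     textToGlyphs(text, len, glyphs.get());             // hypothetical helper
//     glyphs.realloc(glyphCount * 2);                    // may grow onto the heap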

//////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Pass the object and the storage that was offered during SkInPlaceNewCheck, and this will
 *  safely destroy (and free if it was dynamically allocated) the object.
 */
template <typename T> void SkInPlaceDeleteCheck(T* obj, void* storage) {
    if (storage == obj) {
        obj->~T();
    } else {
        delete obj;
    }
}

/**
 *  Allocates T, using storage if it is large enough, and allocating on the heap (via new) if
 *  storage is not large enough.
 *
 *      obj = SkInPlaceNewCheck<Type>(storage, size);
 *      ...
 *      SkInPlaceDeleteCheck(obj, storage);
 */
template<typename T, typename... Args>
T* SkInPlaceNewCheck(void* storage, size_t size, Args&&... args) {
    return (sizeof(T) <= size) ? new (storage) T(std::forward<Args>(args)...)
                               : new T(std::forward<Args>(args)...);
}
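
// A minimal end-to-end sketch (the type and buffer size are illustrative; the caller is
// responsible for providing storage that is suitably aligned for T):
//
//     alignas(std::max_align_t) char storage[64];
//     Foo* obj = SkInPlaceNewCheck<Foo>(storage, sizeof(storage), arg1, arg2);
//     ...
//     SkInPlaceDeleteCheck(obj, storage);   // runs ~Foo() in place, or deletes if heap-allocated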

/**
 * Reserves memory that is aligned on double and pointer boundaries.
 * Hopefully this is sufficient for all practical purposes.
 */
template <size_t N> class SkAlignedSStorage : SkNoncopyable {
public:
    size_t size() const { return N; }
    void* get() { return fData; }
    const void* get() const { return fData; }

private:
    union {
        void*  fPtr;
        double fDouble;
        char   fData[N];
    };
};

/**
 * Reserves memory that is aligned on double and pointer boundaries.
 * Hopefully this is sufficient for all practical purposes. Otherwise,
 * we have to do some arcane trickery to determine alignment of non-POD
 * types. Lifetime of the memory is the lifetime of the object.
 */
template <int N, typename T> class SkAlignedSTStorage : SkNoncopyable {
public:
    /**
     * Returns void* because this object does not initialize the
     * memory. Use placement new for types that require a constructor.
     */
    void* get() { return fStorage.get(); }
    const void* get() const { return fStorage.get(); }
private:
    SkAlignedSStorage<sizeof(T)*N> fStorage;
};
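
// A minimal usage sketch: the storage is uninitialized, so construct with placement new
// and destroy explicitly (SkPaint is used only as an illustration):
//
//     SkAlignedSTStorage<1, SkPaint> storage;
//     SkPaint* paint = new (storage.get()) SkPaint(original);
//     ...
//     paint->~SkPaint();   // the storage itself lives and dies with the enclosing object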

using SkAutoFree = std::unique_ptr<void, SkFunctionWrapper<void, void, sk_free>>;

template<typename C, std::size_t... Is>
constexpr auto SkMakeArrayFromIndexSequence(C c, skstd::index_sequence<Is...>)
-> std::array<skstd::result_of_t<C(std::size_t)>, sizeof...(Is)> {
    return {{ c(Is)... }};
}

template<size_t N, typename C> constexpr auto SkMakeArray(C c)
-> std::array<skstd::result_of_t<C(std::size_t)>, N> {
    return SkMakeArrayFromIndexSequence(c, skstd::make_index_sequence<N>{});
}
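
// A minimal usage sketch: SkMakeArray<N> builds a std::array by calling the supplied
// callable with each index in [0, N) (the 'square' function is illustrative):
//
//     constexpr int square(std::size_t i) { return static_cast<int>(i * i); }
//     ...
//     constexpr auto kSquares = SkMakeArray<4>(square);   // {0, 1, 4, 9}
//     static_assert(kSquares[2] == 4, "unexpected value");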

#endif