Parallel cache.
TBR=reed@google.com
BUG=skia:1330

Review URL: https://codereview.chromium.org/1264103003
parent 714a710c42
commit 6f2a486040
@@ -29,6 +29,9 @@ void sk_atomic_store(T*, T, sk_memory_order = sk_memory_order_seq_cst);
 template <typename T>
 T sk_atomic_fetch_add(T*, T, sk_memory_order = sk_memory_order_seq_cst);
 
+template <typename T>
+T sk_atomic_fetch_sub(T*, T, sk_memory_order = sk_memory_order_seq_cst);
+
 template <typename T>
 bool sk_atomic_compare_exchange(T*, T* expected, T desired,
                                 sk_memory_order success = sk_memory_order_seq_cst,
@@ -58,6 +61,10 @@ public:
         return sk_atomic_fetch_add(&fVal, val, mo);
     }
 
+    T fetch_sub(const T& val, sk_memory_order mo = sk_memory_order_seq_cst) {
+        return sk_atomic_fetch_sub(&fVal, val, mo);
+    }
+
     bool compare_exchange(T* expected, const T& desired,
                           sk_memory_order success = sk_memory_order_seq_cst,
                           sk_memory_order failure = sk_memory_order_seq_cst) {
@@ -31,6 +31,12 @@ T sk_atomic_fetch_add(T* ptr, T val, sk_memory_order mo) {
     return __atomic_fetch_add(ptr, val, mo);
 }
 
+template <typename T>
+T sk_atomic_fetch_sub(T* ptr, T val, sk_memory_order mo) {
+    // All values of mo are valid.
+    return __atomic_fetch_sub(ptr, val, mo);
+}
+
 template <typename T>
 bool sk_atomic_compare_exchange(T* ptr, T* expected, T desired,
                                 sk_memory_order success,
@@ -38,6 +38,13 @@ T sk_atomic_fetch_add(T* ptr, T val, sk_memory_order mo) {
     return std::atomic_fetch_add_explicit(ap, val, (std::memory_order)mo);
 }
 
+template <typename T>
+T sk_atomic_fetch_sub(T* ptr, T val, sk_memory_order mo) {
+    // All values of mo are valid.
+    std::atomic<T>* ap = reinterpret_cast<std::atomic<T>*>(ptr);
+    return std::atomic_fetch_sub_explicit(ap, val, (std::memory_order)mo);
+}
+
 template <typename T>
 bool sk_atomic_compare_exchange(T* ptr, T* expected, T desired,
                                 sk_memory_order success,
@@ -45,6 +45,11 @@ T sk_atomic_fetch_add(T* ptr, T val, sk_memory_order) {
     return __sync_fetch_and_add(ptr, val);
 }
 
+template <typename T>
+T sk_atomic_fetch_sub(T* ptr, T val, sk_memory_order) {
+    return __sync_fetch_and_sub(ptr, val);
+}
+
 template <typename T>
 bool sk_atomic_compare_exchange(T* ptr, T* expected, T desired, sk_memory_order, sk_memory_order) {
     T prev = __sync_val_compare_and_swap(ptr, *expected, desired);
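Aside (illustrative, not part of this change): the three hunks above give each of Skia's atomics backends (__atomic, std::atomic, __sync) a sk_atomic_fetch_sub with the same contract as the existing sk_atomic_fetch_add: subtract and return the value held before the subtraction. A minimal stand-alone sketch of that contract, written against plain std::atomic rather than Skia's wrappers:

    // Sketch only: sk_atomic_fetch_sub behaves like std::atomic_fetch_sub_explicit
    // and returns the previous value.
    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> refCount(2);
        int prev = std::atomic_fetch_sub_explicit(&refCount, 1, std::memory_order_seq_cst);
        assert(prev == 2);
        assert(refCount.load() == 1);
        return 0;
    }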
@@ -1476,14 +1476,12 @@ static void D1G_RectClip(const SkDraw1Glyph& state, Sk48Dot16 fx, Sk48Dot16 fy,
         bounds = &storage;
     }
 
-    uint8_t* aa = (uint8_t*)glyph.fImage;
+    uint8_t* aa = (uint8_t*)state.fCache->findImage(glyph);
     if (nullptr == aa) {
-        aa = (uint8_t*)state.fCache->findImage(glyph);
-        if (nullptr == aa) {
-            return; // can't rasterize glyph
-        }
+        return; // can't rasterize glyph
     }
 
+
     mask.fRowBytes = glyph.rowBytes();
     mask.fFormat = static_cast<SkMask::Format>(glyph.fMaskFormat);
     mask.fImage = aa;
@@ -47,6 +47,8 @@ class SkGlyph {
     uint8_t  fMaskFormat;
     int8_t   fRsbDelta, fLsbDelta;  // used by auto-kerning
     int8_t   fForceBW;
+    mutable bool fImageIsSet;
+    mutable bool fPathIsSet;
 
     void initWithGlyphID(uint32_t glyph_id) {
         this->initCommon(MakeID(glyph_id));
@@ -135,6 +137,8 @@ class SkGlyph {
         fPath = nullptr;
         fMaskFormat = MASK_FORMAT_UNKNOWN;
         fForceBW = 0;
+        fImageIsSet = false;
+        fPathIsSet = false;
     }
 
     static unsigned ID2Code(uint32_t id) {
@@ -9,6 +9,7 @@
 #include "SkGlyphCache_Globals.h"
 #include "SkGraphics.h"
 #include "SkLazyPtr.h"
+#include "SkOnce.h"
 #include "SkPath.h"
 #include "SkTemplates.h"
 #include "SkTraceMemoryDump.h"
@@ -45,42 +46,90 @@ static SkGlyphCache_Globals& get_globals() {
 #define kMinAllocAmount ((sizeof(SkGlyph) + kMinGlyphImageSize) * kMinGlyphCount)
 
 SkGlyphCache::SkGlyphCache(SkTypeface* typeface, const SkDescriptor* desc, SkScalerContext* ctx)
-    : fDesc(desc->copy())
+    : fNext(nullptr)
+    , fPrev(nullptr)
+    , fDesc(desc->copy())
+    , fRefCount(0)
+    , fGlyphAlloc(kMinAllocAmount)
+    , fMemoryUsed(sizeof(*this))
     , fScalerContext(ctx)
-    , fGlyphAlloc(kMinAllocAmount) {
+    , fAuxProcList(nullptr) {
     SkASSERT(typeface);
     SkASSERT(desc);
     SkASSERT(ctx);
 
-    fPrev = fNext = nullptr;
-
     fScalerContext->getFontMetrics(&fFontMetrics);
-
-    fMemoryUsed = sizeof(*this);
-
-    fAuxProcList = nullptr;
 }
 
 SkGlyphCache::~SkGlyphCache() {
     fGlyphMap.foreach ([](SkGlyph* g) { delete g->fPath; });
     SkDescriptor::Free(fDesc);
     delete fScalerContext;
-    this->invokeAndRemoveAuxProcs();
+    AuxProcRec* rec = fAuxProcList;
+    while (rec) {
+        rec->fProc(rec->fData);
+        AuxProcRec* next = rec->fNext;
+        delete rec;
+        rec = next;
+    }
+}
+
+void SkGlyphCache::increaseMemoryUsed(size_t used) {
+    fMemoryUsed += used;
+    get_globals().increaseTotalMemoryUsed(used);
+}
+
+SkGlyphCache::CharGlyphRec
+SkGlyphCache::PackedUnicharIDtoCharGlyphRec(PackedUnicharID packedUnicharID) {
+    SkFixed x = SkGlyph::SubToFixed(SkGlyph::ID2SubX(packedUnicharID));
+    SkFixed y = SkGlyph::SubToFixed(SkGlyph::ID2SubY(packedUnicharID));
+    SkUnichar unichar = SkGlyph::ID2Code(packedUnicharID);
+
+    SkAutoMutexAcquire lock(fScalerMutex);
+    PackedGlyphID packedGlyphID = SkGlyph::MakeID(fScalerContext->charToGlyphID(unichar), x, y);
+
+    return {packedUnicharID, packedGlyphID};
 }
 
 SkGlyphCache::CharGlyphRec* SkGlyphCache::getCharGlyphRec(PackedUnicharID packedUnicharID) {
     if (nullptr == fPackedUnicharIDToPackedGlyphID.get()) {
-        // Allocate the array.
-        fPackedUnicharIDToPackedGlyphID.reset(kHashCount);
-        // Initialize array to map character and position with the impossible glyph ID. This
-        // represents no mapping.
-        for (int i = 0; i <kHashCount; ++i) {
-            fPackedUnicharIDToPackedGlyphID[i].fPackedUnicharID = SkGlyph::kImpossibleID;
-            fPackedUnicharIDToPackedGlyphID[i].fPackedGlyphID = 0;
+        fMapMutex.releaseShared();
+
+        // Add the map only if there is a call for char -> glyph mapping.
+        {
+            SkAutoTAcquire<SkSharedMutex> lock(fMapMutex);
+
+            // Now that the cache is locked exclusively, make sure no one added this array
+            // while unlocked.
+            if (nullptr == fPackedUnicharIDToPackedGlyphID.get()) {
+                // Allocate the array.
+                fPackedUnicharIDToPackedGlyphID.reset(new PackedUnicharIDToPackedGlyphIDMap);
+            }
+
+            fPackedUnicharIDToPackedGlyphID->set(PackedUnicharIDtoCharGlyphRec(packedUnicharID));
         }
+        fMapMutex.acquireShared();
+
+        return fPackedUnicharIDToPackedGlyphID->find(packedUnicharID);
     }
 
-    return &fPackedUnicharIDToPackedGlyphID[SkChecksum::CheapMix(packedUnicharID) & kHashMask];
+    CharGlyphRec* answer = fPackedUnicharIDToPackedGlyphID->find(packedUnicharID);
+    if (nullptr == answer) {
+        fMapMutex.releaseShared();
+        // Add a new char -> glyph mapping.
+        {
+            SkAutoTAcquire<SkSharedMutex> lock(fMapMutex);
+            answer = fPackedUnicharIDToPackedGlyphID->find(packedUnicharID);
+            if (nullptr == answer) {
+                fPackedUnicharIDToPackedGlyphID->set(
+                    PackedUnicharIDtoCharGlyphRec(packedUnicharID));
+            }
+        }
+        fMapMutex.acquireShared();
+        return fPackedUnicharIDToPackedGlyphID->find(packedUnicharID);
+    }
+
+    return answer;
 }
 
 ///////////////////////////////////////////////////////////////////////////////
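Aside (illustrative, not part of this change): getCharGlyphRec above is entered with fMapMutex held in shared mode; to insert a missing char -> glyph mapping it releases the shared lock, takes the lock exclusively, re-checks (another thread may have inserted the entry in the unlocked window), inserts if still missing, and then drops back to shared mode before returning. A hedged sketch of that upgrade dance using std::shared_mutex as a stand-in for SkSharedMutex (the Cache type and lookupOrInsert helper below are hypothetical):

    #include <cstdint>
    #include <mutex>
    #include <shared_mutex>
    #include <unordered_map>

    struct Cache {
        std::shared_mutex mapMutex;  // assumed: callers already hold this in shared mode
        std::unordered_map<uint32_t, uint32_t> map;

        uint32_t lookupOrInsert(uint32_t key, uint32_t (*compute)(uint32_t)) {
            auto it = map.find(key);
            if (it == map.end()) {
                mapMutex.unlock_shared();  // give up the shared lock
                {
                    std::unique_lock<std::shared_mutex> lock(mapMutex);
                    // Re-check: another thread may have added the entry while unlocked.
                    if (map.find(key) == map.end()) {
                        map.emplace(key, compute(key));
                    }
                }
                mapMutex.lock_shared();    // drop back to shared mode
                return map.at(key);        // re-find, as getCharGlyphRec does above
            }
            return it->second;
        }
    };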
@@ -95,15 +144,11 @@ uint16_t SkGlyphCache::unicharToGlyph(SkUnichar charCode) {
     VALIDATE();
     PackedUnicharID packedUnicharID = SkGlyph::MakeID(charCode);
     const CharGlyphRec& rec = *this->getCharGlyphRec(packedUnicharID);
-
-    if (rec.fPackedUnicharID == packedUnicharID) {
-        return SkGlyph::ID2Code(rec.fPackedGlyphID);
-    } else {
-        return fScalerContext->charToGlyphID(charCode);
-    }
+    return SkGlyph::ID2Code(rec.fPackedGlyphID);
 }
 
 SkUnichar SkGlyphCache::glyphToUnichar(uint16_t glyphID) {
+    SkAutoMutexAcquire lock(fScalerMutex);
     return fScalerContext->glyphIDToChar(glyphID);
 }
 
@@ -117,19 +162,19 @@ int SkGlyphCache::countCachedGlyphs() const {
 
 ///////////////////////////////////////////////////////////////////////////////
 
+SkGlyph* SkGlyphCache::lookupByChar(SkUnichar charCode, SkFixed x, SkFixed y) {
+    PackedUnicharID targetUnicharID = SkGlyph::MakeID(charCode, x, y);
+    CharGlyphRec* rec = this->getCharGlyphRec(targetUnicharID);
+    PackedGlyphID packedGlyphID = rec->fPackedGlyphID;
+
+    return this->lookupByPackedGlyphID(packedGlyphID);
+}
+
 const SkGlyph& SkGlyphCache::getUnicharAdvance(SkUnichar charCode) {
     VALIDATE();
     return *this->lookupByChar(charCode);
 }
 
-const SkGlyph& SkGlyphCache::getGlyphIDAdvance(uint16_t glyphID) {
-    VALIDATE();
-    PackedGlyphID packedGlyphID = SkGlyph::MakeID(glyphID);
-    return *this->lookupByPackedGlyphID(packedGlyphID);
-}
-
-///////////////////////////////////////////////////////////////////////////////
-
 const SkGlyph& SkGlyphCache::getUnicharMetrics(SkUnichar charCode) {
     VALIDATE();
     return *this->lookupByChar(charCode);
@@ -140,6 +185,49 @@ const SkGlyph& SkGlyphCache::getUnicharMetrics(SkUnichar charCode, SkFixed x, Sk
     return *this->lookupByChar(charCode, x, y);
 }
 
+///////////////////////////////////////////////////////////////////////////////
+SkGlyph* SkGlyphCache::allocateNewGlyph(PackedGlyphID packedGlyphID) {
+    SkGlyph* glyphPtr;
+    {
+        fMapMutex.releaseShared();
+        {
+            SkAutoTAcquire<SkSharedMutex> mapLock(fMapMutex);
+            glyphPtr = fGlyphMap.find(packedGlyphID);
+            if (nullptr == glyphPtr) {
+                SkGlyph glyph;
+                glyph.initGlyphFromCombinedID(packedGlyphID);
+                {
+                    SkAutoMutexAcquire lock(fScalerMutex);
+                    fScalerContext->getMetrics(&glyph);
+                    this->increaseMemoryUsed(sizeof(SkGlyph));
+                    glyphPtr = fGlyphMap.set(glyph);
+                }  // drop scaler lock
+
+            }
+        }  // drop map lock
+        fMapMutex.acquireShared();
+        glyphPtr = fGlyphMap.find(packedGlyphID);
+    }
+
+    SkASSERT(glyphPtr->fID != SkGlyph::kImpossibleID);
+    return glyphPtr;
+}
+
+SkGlyph* SkGlyphCache::lookupByPackedGlyphID(PackedGlyphID packedGlyphID) {
+    SkGlyph* glyph = fGlyphMap.find(packedGlyphID);
+
+    if (nullptr == glyph) {
+        glyph = this->allocateNewGlyph(packedGlyphID);
+    }
+    return glyph;
+}
+
+const SkGlyph& SkGlyphCache::getGlyphIDAdvance(uint16_t glyphID) {
+    VALIDATE();
+    PackedGlyphID packedGlyphID = SkGlyph::MakeID(glyphID);
+    return *this->lookupByPackedGlyphID(packedGlyphID);
+}
+
 const SkGlyph& SkGlyphCache::getGlyphIDMetrics(uint16_t glyphID) {
     VALIDATE();
     PackedGlyphID packedGlyphID = SkGlyph::MakeID(glyphID);
@@ -152,74 +240,46 @@ const SkGlyph& SkGlyphCache::getGlyphIDMetrics(uint16_t glyphID, SkFixed x, SkFi
     return *this->lookupByPackedGlyphID(packedGlyphID);
 }
 
-SkGlyph* SkGlyphCache::lookupByChar(SkUnichar charCode, SkFixed x, SkFixed y) {
-    PackedUnicharID id = SkGlyph::MakeID(charCode, x, y);
-    CharGlyphRec* rec = this->getCharGlyphRec(id);
-    if (rec->fPackedUnicharID != id) {
-        // this ID is based on the UniChar
-        rec->fPackedUnicharID = id;
-        // this ID is based on the glyph index
-        PackedGlyphID combinedID = SkGlyph::MakeID(fScalerContext->charToGlyphID(charCode), x, y);
-        rec->fPackedGlyphID = combinedID;
-        return this->lookupByPackedGlyphID(combinedID);
-    } else {
-        return this->lookupByPackedGlyphID(rec->fPackedGlyphID);
-    }
-}
-
-SkGlyph* SkGlyphCache::lookupByPackedGlyphID(PackedGlyphID packedGlyphID) {
-    SkGlyph* glyph = fGlyphMap.find(packedGlyphID);
-    if (nullptr == glyph) {
-        glyph = this->allocateNewGlyph(packedGlyphID);
-    }
-    return glyph;
-}
-
-SkGlyph* SkGlyphCache::allocateNewGlyph(PackedGlyphID packedGlyphID) {
-    fMemoryUsed += sizeof(SkGlyph);
-
-    SkGlyph* glyphPtr;
-    {
-        SkGlyph glyph;
-        glyph.initGlyphFromCombinedID(packedGlyphID);
-        glyphPtr = fGlyphMap.set(glyph);
-    }
-    fScalerContext->getMetrics(glyphPtr);
-
-    SkASSERT(glyphPtr->fID != SkGlyph::kImpossibleID);
-    return glyphPtr;
-}
-
-const void* SkGlyphCache::findImage(const SkGlyph& glyph) {
-    if (glyph.fWidth > 0 && glyph.fWidth < kMaxGlyphWidth) {
-        if (nullptr == glyph.fImage) {
-            size_t size = glyph.computeImageSize();
-            const_cast<SkGlyph&>(glyph).fImage = fGlyphAlloc.alloc(size,
-                                        SkChunkAlloc::kReturnNil_AllocFailType);
-            // check that alloc() actually succeeded
-            if (glyph.fImage) {
-                fScalerContext->getImage(glyph);
-                // TODO: the scaler may have changed the maskformat during
-                // getImage (e.g. from AA or LCD to BW) which means we may have
-                // overallocated the buffer. Check if the new computedImageSize
-                // is smaller, and if so, strink the alloc size in fImageAlloc.
-                fMemoryUsed += size;
-            }
-        }
-    }
-    return glyph.fImage;
-}
-
-const SkPath* SkGlyphCache::findPath(const SkGlyph& glyph) {
-    if (glyph.fWidth) {
-        if (glyph.fPath == nullptr) {
-            const_cast<SkGlyph&>(glyph).fPath = new SkPath;
-            fScalerContext->getPath(glyph, glyph.fPath);
-            fMemoryUsed += sizeof(SkPath) +
-                    glyph.fPath->countPoints() * sizeof(SkPoint);
-        }
-    }
-    return glyph.fPath;
-}
+///////////////////////////////////////////////////////////////////////////////
+
+void SkGlyphCache::OnceFillInImage(GlyphAndCache gc) {
+    SkGlyphCache* cache = gc.cache;
+    const SkGlyph* glyph = gc.glyph;
+    cache->fScalerMutex.assertHeld();
+    if (glyph->fWidth > 0 && glyph->fWidth < kMaxGlyphWidth) {
+        size_t size = glyph->computeImageSize();
+        sk_atomic_store(&const_cast<SkGlyph*>(glyph)->fImage,
+                        cache->fGlyphAlloc.alloc(size, SkChunkAlloc::kReturnNil_AllocFailType),
+                        sk_memory_order_relaxed);
+        if (glyph->fImage != nullptr) {
+            cache->fScalerContext->getImage(*glyph);
+            cache->increaseMemoryUsed(size);
+        }
+    }
+}
+
+const void* SkGlyphCache::findImage(const SkGlyph& glyph) {
+    SkOnce<SkMutex, GlyphAndCache>(
+        &glyph.fImageIsSet, &fScalerMutex, &SkGlyphCache::OnceFillInImage, {this, &glyph});
+    return sk_atomic_load(&glyph.fImage, sk_memory_order_seq_cst);
+}
+
+void SkGlyphCache::OnceFillInPath(GlyphAndCache gc) {
+    SkGlyphCache* cache = gc.cache;
+    const SkGlyph* glyph = gc.glyph;
+    cache->fScalerMutex.assertHeld();
+    if (glyph->fWidth > 0) {
+        sk_atomic_store(&const_cast<SkGlyph*>(glyph)->fPath, new SkPath, sk_memory_order_relaxed);
+        cache->fScalerContext->getPath(*glyph, glyph->fPath);
+        size_t size = sizeof(SkPath) + glyph->fPath->countPoints() * sizeof(SkPoint);
+        cache->increaseMemoryUsed(size);
+    }
+}
+
+const SkPath* SkGlyphCache::findPath(const SkGlyph& glyph) {
+    SkOnce<SkMutex, GlyphAndCache>(
+        &glyph.fPathIsSet, &fScalerMutex, &SkGlyphCache::OnceFillInPath, {this, &glyph});
+    return sk_atomic_load(&glyph.fPath, sk_memory_order_seq_cst);
+}
 
 void SkGlyphCache::dump() const {
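Aside (illustrative, not part of this change): in the reworked findImage/findPath above, a per-glyph once-flag plus fScalerMutex ensures exactly one thread rasterizes the image or path, the result is published with an atomic store, and every later caller only performs an atomic load. A generic sketch of that publish-once pattern with std::atomic and std::mutex (fill_in_once is a hypothetical helper, not Skia's SkOnce):

    #include <atomic>
    #include <mutex>

    template <typename Arg>
    void fill_in_once(std::atomic<bool>* done, std::mutex* mu, void (*fill)(Arg), Arg arg) {
        if (done->load(std::memory_order_acquire)) {
            return;  // fast path: payload already published
        }
        std::lock_guard<std::mutex> lock(*mu);  // slow path: only one thread fills in
        if (!done->load(std::memory_order_relaxed)) {
            fill(arg);  // e.g. allocate and rasterize the glyph image
            done->store(true, std::memory_order_release);
        }
    }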
@@ -232,18 +292,22 @@ void SkGlyphCache::dump() const {
     face->getFamilyName(&name);
 
     SkString msg;
-    msg.printf("cache typeface:%x %25s:%d size:%2g [%g %g %g %g] lum:%02X devG:%d pntG:%d cntr:%d glyphs:%3d",
-               face->uniqueID(), name.c_str(), face->style(), rec.fTextSize,
-               matrix[SkMatrix::kMScaleX], matrix[SkMatrix::kMSkewX],
-               matrix[SkMatrix::kMSkewY], matrix[SkMatrix::kMScaleY],
-               rec.fLumBits & 0xFF, rec.fDeviceGamma, rec.fPaintGamma, rec.fContrast,
-               fGlyphMap.count());
+    msg.printf(
+        "cache typeface:%x %25s:%d size:%2g [%g %g %g %g] "
+        "lum:%02X devG:%d pntG:%d cntr:%d glyphs:%3d",
+        face->uniqueID(), name.c_str(), face->style(), rec.fTextSize,
+        matrix[SkMatrix::kMScaleX], matrix[SkMatrix::kMSkewX],
+        matrix[SkMatrix::kMSkewY], matrix[SkMatrix::kMScaleY],
+        rec.fLumBits & 0xFF, rec.fDeviceGamma, rec.fPaintGamma, rec.fContrast,
+        fGlyphMap.count());
     SkDebugf("%s\n", msg.c_str());
 }
 
 ///////////////////////////////////////////////////////////////////////////////
 
 bool SkGlyphCache::getAuxProcData(void (*proc)(void*), void** dataPtr) const {
+    // Borrow the fScalerMutex to protect the AuxProc list.
+    SkAutoMutexAcquire lock(fScalerMutex);
     const AuxProcRec* rec = fAuxProcList;
     while (rec) {
         if (rec->fProc == proc) {
@@ -262,6 +326,8 @@ void SkGlyphCache::setAuxProc(void (*proc)(void*), void* data) {
         return;
     }
 
+    // Borrow the fScalerMutex to protect the AuxProc linked list.
+    SkAutoMutexAcquire lock(fScalerMutex);
     AuxProcRec* rec = fAuxProcList;
     while (rec) {
         if (rec->fProc == proc) {
@@ -278,27 +344,9 @@ void SkGlyphCache::setAuxProc(void (*proc)(void*), void* data) {
     fAuxProcList = rec;
 }
 
-void SkGlyphCache::invokeAndRemoveAuxProcs() {
-    AuxProcRec* rec = fAuxProcList;
-    while (rec) {
-        rec->fProc(rec->fData);
-        AuxProcRec* next = rec->fNext;
-        delete rec;
-        rec = next;
-    }
-}
-
-///////////////////////////////////////////////////////////////////////////////
 ///////////////////////////////////////////////////////////////////////////////
 
-class AutoAcquire {
-public:
-    AutoAcquire(SkSpinlock& lock) : fLock(lock) { fLock.acquire(); }
-    ~AutoAcquire() { fLock.release(); }
-private:
-    SkSpinlock& fLock;
-};
+typedef SkAutoTAcquire<SkSpinlock> AutoAcquire;
 
 size_t SkGlyphCache_Globals::setCacheSizeLimit(size_t newLimit) {
     static const size_t minLimit = 256 * 1024;
@@ -329,7 +377,7 @@ int SkGlyphCache_Globals::setCacheCountLimit(int newCount) {
 
 void SkGlyphCache_Globals::purgeAll() {
     AutoAcquire ac(fLock);
-    this->internalPurge(fTotalMemoryUsed);
+    this->internalPurge(fTotalMemoryUsed.load());
 }
 
 /* This guy calls the visitor from within the mutext lock, so the visitor
@@ -338,10 +386,8 @@ void SkGlyphCache_Globals::purgeAll() {
     - try to acquire the mutext again
     - call a fontscaler (which might call into the cache)
 */
-SkGlyphCache* SkGlyphCache::VisitCache(SkTypeface* typeface,
-                                       const SkDescriptor* desc,
-                                       bool (*proc)(const SkGlyphCache*, void*),
-                                       void* context) {
+SkGlyphCache* SkGlyphCache::VisitCache(
+    SkTypeface* typeface, const SkDescriptor* desc, VisitProc proc, void* context) {
     if (!typeface) {
         typeface = SkTypeface::GetDefaultTypeface();
     }
@@ -357,11 +403,15 @@ SkGlyphCache* SkGlyphCache::VisitCache(SkTypeface* typeface,
 
     for (cache = globals.internalGetHead(); cache != nullptr; cache = cache->fNext) {
         if (cache->fDesc->equals(*desc)) {
-            globals.internalDetachCache(cache);
+            globals.internalMoveToHead(cache);
+            cache->fMapMutex.acquireShared();
             if (!proc(cache, context)) {
-                globals.internalAttachCacheToHead(cache);
-                cache = nullptr;
+                cache->fMapMutex.releaseShared();
+                return nullptr;
             }
+            // The caller will take reference on this SkGlyphCache, and the corresponding
+            // Attach call will decrement the reference.
+            cache->fRefCount += 1;
             return cache;
         }
     }
@@ -374,28 +424,45 @@ SkGlyphCache* SkGlyphCache::VisitCache(SkTypeface* typeface,
         // pass true the first time, to notice if the scalercontext failed,
         // so we can try the purge.
         SkScalerContext* ctx = typeface->createScalerContext(desc, true);
-        if (!ctx) {
+        if (nullptr == ctx) {
             get_globals().purgeAll();
             ctx = typeface->createScalerContext(desc, false);
             SkASSERT(ctx);
         }
 
         cache = new SkGlyphCache(typeface, desc, ctx);
+
+        globals.attachCacheToHead(cache);
     }
 
     AutoValidate av(cache);
+    AutoAcquire ac(globals.fLock);
+
+    cache->fMapMutex.acquireShared();
     if (!proc(cache, context)) { // need to reattach
-        globals.attachCacheToHead(cache);
-        cache = nullptr;
+        cache->fMapMutex.releaseShared();
+        return nullptr;
     }
+    // The caller will take reference on this SkGlyphCache, and the corresponding
+    // Attach call will decrement the reference.
+    cache->fRefCount += 1;
     return cache;
 }
 
 void SkGlyphCache::AttachCache(SkGlyphCache* cache) {
     SkASSERT(cache);
-    SkASSERT(cache->fNext == nullptr);
+    cache->fMapMutex.releaseShared();
+    SkGlyphCache_Globals& globals = get_globals();
+    AutoAcquire ac(globals.fLock);
+    globals.validate();
+    cache->validate();
 
-    get_globals().attachCacheToHead(cache);
+    // Unref and delete if no longer in the LRU list.
+    cache->fRefCount -= 1;
+    if (cache->fRefCount == 0) {
+        delete cache;
+    }
+    globals.internalPurge();
 }
 
 static void dump_visitor(const SkGlyphCache& cache, void* context) {
@@ -473,10 +540,16 @@ void SkGlyphCache::VisitAll(Visitor visitor, void* context) {
 void SkGlyphCache_Globals::attachCacheToHead(SkGlyphCache* cache) {
     AutoAcquire ac(fLock);
 
+    fCacheCount += 1;
+    cache->fRefCount += 1;
+    // Access to cache->fMemoryUsed is single threaded until internalMoveToHead.
+    fTotalMemoryUsed.fetch_add(cache->fMemoryUsed);
+
+    this->internalMoveToHead(cache);
+
     this->validate();
     cache->validate();
 
-    this->internalAttachCacheToHead(cache);
     this->internalPurge();
 }
 
@@ -494,13 +567,13 @@ size_t SkGlyphCache_Globals::internalPurge(size_t minBytesNeeded) {
     this->validate();
 
     size_t bytesNeeded = 0;
-    if (fTotalMemoryUsed > fCacheSizeLimit) {
-        bytesNeeded = fTotalMemoryUsed - fCacheSizeLimit;
+    if (fTotalMemoryUsed.load() > fCacheSizeLimit) {
+        bytesNeeded = fTotalMemoryUsed.load() - fCacheSizeLimit;
     }
     bytesNeeded = SkTMax(bytesNeeded, minBytesNeeded);
     if (bytesNeeded) {
         // no small purges!
-        bytesNeeded = SkTMax(bytesNeeded, fTotalMemoryUsed >> 2);
+        bytesNeeded = SkTMax(bytesNeeded, fTotalMemoryUsed.load() >> 2);
     }
 
     int countNeeded = 0;
@@ -526,9 +599,10 @@ size_t SkGlyphCache_Globals::internalPurge(size_t minBytesNeeded) {
             SkGlyphCache* prev = cache->fPrev;
             bytesFreed += cache->fMemoryUsed;
             countFreed += 1;
-
             this->internalDetachCache(cache);
-            delete cache;
+            if (0 == cache->fRefCount) {
+                delete cache;
+            }
             cache = prev;
         }
 
@@ -544,34 +618,50 @@ size_t SkGlyphCache_Globals::internalPurge(size_t minBytesNeeded) {
     return bytesFreed;
 }
 
-void SkGlyphCache_Globals::internalAttachCacheToHead(SkGlyphCache* cache) {
-    SkASSERT(nullptr == cache->fPrev && nullptr == cache->fNext);
-    if (fHead) {
-        fHead->fPrev = cache;
-        cache->fNext = fHead;
+void SkGlyphCache_Globals::internalMoveToHead(SkGlyphCache *cache) {
+    if (cache != fHead) {
+        if (cache->fPrev) {
+            cache->fPrev->fNext = cache->fNext;
+        }
+        if (cache->fNext) {
+            cache->fNext->fPrev = cache->fPrev;
+        }
+        cache->fNext = nullptr;
+        cache->fPrev = nullptr;
+        if (fHead) {
+            fHead->fPrev = cache;
+            cache->fNext = fHead;
+        }
+        fHead = cache;
     }
-    fHead = cache;
-
-    fCacheCount += 1;
-    fTotalMemoryUsed += cache->fMemoryUsed;
 }
 
 void SkGlyphCache_Globals::internalDetachCache(SkGlyphCache* cache) {
-    SkASSERT(fCacheCount > 0);
     fCacheCount -= 1;
-    fTotalMemoryUsed -= cache->fMemoryUsed;
+    fTotalMemoryUsed.fetch_sub(cache->fMemoryUsed);
 
     if (cache->fPrev) {
         cache->fPrev->fNext = cache->fNext;
     } else {
+        // If cache->fPrev == nullptr then this is the head node.
        fHead = cache->fNext;
+        if (fHead != nullptr) {
+            fHead->fPrev = nullptr;
+        }
     }
     if (cache->fNext) {
         cache->fNext->fPrev = cache->fPrev;
+    } else {
+        // If cache->fNext == nullptr then this is the last node.
+        if (cache->fPrev != nullptr) {
+            cache->fPrev->fNext = nullptr;
+        }
     }
     cache->fPrev = cache->fNext = nullptr;
+    cache->fRefCount -= 1;
 }
 
 
 ///////////////////////////////////////////////////////////////////////////////
 
 #ifdef SK_DEBUG
|
|||||||
}
|
}
|
||||||
|
|
||||||
void SkGlyphCache_Globals::validate() const {
|
void SkGlyphCache_Globals::validate() const {
|
||||||
size_t computedBytes = 0;
|
|
||||||
int computedCount = 0;
|
int computedCount = 0;
|
||||||
|
|
||||||
const SkGlyphCache* head = fHead;
|
SkGlyphCache* head = fHead;
|
||||||
while (head != nullptr) {
|
while (head != nullptr) {
|
||||||
computedBytes += head->fMemoryUsed;
|
|
||||||
computedCount += 1;
|
computedCount += 1;
|
||||||
head = head->fNext;
|
head = head->fNext;
|
||||||
}
|
}
|
||||||
|
|
||||||
SkASSERTF(fCacheCount == computedCount, "fCacheCount: %d, computedCount: %d", fCacheCount,
|
SkASSERTF(fCacheCount == computedCount, "fCacheCount: %d, computedCount: %d", fCacheCount,
|
||||||
computedCount);
|
computedCount);
|
||||||
SkASSERTF(fTotalMemoryUsed == computedBytes, "fTotalMemoryUsed: %d, computedBytes: %d",
|
|
||||||
fTotalMemoryUsed, computedBytes);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
@@ -11,8 +11,11 @@
 #include "SkChunkAlloc.h"
 #include "SkDescriptor.h"
 #include "SkGlyph.h"
+#include "SkMutex.h"
 #include "SkTHash.h"
 #include "SkScalerContext.h"
+#include "SkSharedMutex.h"
+#include "SkSpinlock.h"
 #include "SkTemplates.h"
 #include "SkTDArray.h"
 
|
|||||||
|
|
||||||
SkScalerContext* getScalerContext() const { return fScalerContext; }
|
SkScalerContext* getScalerContext() const { return fScalerContext; }
|
||||||
|
|
||||||
|
struct GlyphAndCache {
|
||||||
|
SkGlyphCache* cache;
|
||||||
|
const SkGlyph* glyph;
|
||||||
|
};
|
||||||
|
|
||||||
|
static void OnceFillInImage(GlyphAndCache gc);
|
||||||
|
|
||||||
|
static void OnceFillInPath(GlyphAndCache gc);
|
||||||
|
|
||||||
|
typedef bool (*VisitProc)(const SkGlyphCache*, void*);
|
||||||
|
|
||||||
/** Find a matching cache entry, and call proc() with it. If none is found create a new one.
|
/** Find a matching cache entry, and call proc() with it. If none is found create a new one.
|
||||||
If the proc() returns true, detach the cache and return it, otherwise leave it and return
|
If the proc() returns true, detach the cache and return it, otherwise leave it and return
|
||||||
nullptr.
|
nullptr.
|
||||||
*/
|
*/
|
||||||
static SkGlyphCache* VisitCache(SkTypeface*, const SkDescriptor* desc,
|
static SkGlyphCache* VisitCache(SkTypeface*, const SkDescriptor* desc,
|
||||||
bool (*proc)(const SkGlyphCache*, void*),
|
VisitProc proc,
|
||||||
void* context);
|
void* context);
|
||||||
|
|
||||||
/** Given a strike that was returned by either VisitCache() or DetachCache() add it back into
|
/** Given a strike that was returned by either VisitCache() or DetachCache() add it back into
|
||||||
@@ -181,18 +195,21 @@ public:
 private:
     friend class SkGlyphCache_Globals;
 
-    enum {
-        kHashBits  = 8,
-        kHashCount = 1 << kHashBits,
-        kHashMask  = kHashCount - 1
-    };
-
     typedef uint32_t PackedGlyphID;    // glyph-index + subpixel-pos
     typedef uint32_t PackedUnicharID;  // unichar + subpixel-pos
 
     struct CharGlyphRec {
-        PackedUnicharID fPackedUnicharID;
-        PackedGlyphID   fPackedGlyphID;
+        class HashTraits {
+        public:
+            static PackedUnicharID GetKey(const CharGlyphRec& rec) {
+                return rec.fPackedUnicharID;
+            }
+            static uint32_t Hash(PackedUnicharID unicharID) {
+                return SkChecksum::CheapMix(unicharID);
+            }
+        };
+        PackedUnicharID fPackedUnicharID;
+        PackedGlyphID   fPackedGlyphID;
     };
 
     struct AuxProcRec {
@@ -205,6 +222,9 @@ private:
     SkGlyphCache(SkTypeface*, const SkDescriptor*, SkScalerContext*);
     ~SkGlyphCache();
 
+    // Increase the memory used keeping the cache and the global size in sync.
+    void increaseMemoryUsed(size_t used);
+
     // Return the SkGlyph* associated with MakeID. The id parameter is the
     // combined glyph/x/y id generated by MakeID. If it is just a glyph id
     // then x and y are assumed to be zero.
@@ -216,32 +236,45 @@ private:
     // Return a new SkGlyph for the glyph ID and subpixel position id.
     SkGlyph* allocateNewGlyph(PackedGlyphID packedGlyphID);
 
+    // Add the full metrics to an existing glyph.
+    void addFullMetrics(SkGlyph* glyph);
+
     static bool DetachProc(const SkGlyphCache*, void*) { return true; }
 
+    CharGlyphRec PackedUnicharIDtoCharGlyphRec(PackedUnicharID packedUnicharID);
+
     // The id arg is a combined id generated by MakeID.
     CharGlyphRec* getCharGlyphRec(PackedUnicharID id);
 
-    void invokeAndRemoveAuxProcs();
-
     inline static SkGlyphCache* FindTail(SkGlyphCache* head);
 
+    // The following are protected by the SkGlyphCache_Globals fLock mutex.
+    // Note: the following fields are protected by a mutex in a different class.
     SkGlyphCache*        fNext;
     SkGlyphCache*        fPrev;
     SkDescriptor* const  fDesc;
-    SkScalerContext* const fScalerContext;
     SkPaint::FontMetrics fFontMetrics;
+    int                  fRefCount;
+
+    // The following fields are protected by fMapMutex.
+    mutable SkSharedMutex fMapMutex;
     // Map from a combined GlyphID and sub-pixel position to a SkGlyph.
     SkTHashTable<SkGlyph, PackedGlyphID, SkGlyph::HashTraits> fGlyphMap;
 
     SkChunkAlloc fGlyphAlloc;
-    SkAutoTArray<CharGlyphRec> fPackedUnicharIDToPackedGlyphID;
+    typedef SkTHashTable<CharGlyphRec, PackedUnicharID, CharGlyphRec::HashTraits>
+        PackedUnicharIDToPackedGlyphIDMap;
+    SkAutoTDelete<PackedUnicharIDToPackedGlyphIDMap> fPackedUnicharIDToPackedGlyphID;
     // used to track (approx) how much ram is tied-up in this cache
     size_t fMemoryUsed;
 
+    // The FScalerMutex protects the following fields. It is mainly used to ensure single-threaded
+    // access to the font scaler, but it also protects the fAuxProcList.
+    mutable SkMutex fScalerMutex;
+    SkScalerContext* const fScalerContext;
     AuxProcRec* fAuxProcList;
+
+    // BEWARE: Mutex ordering
+    // If you need to hold both fMapMutex and fScalerMutex then fMapMutex must be held first.
 };
 
 class SkAutoGlyphCacheBase {
@@ -26,8 +26,9 @@
 class SkGlyphCache_Globals {
 public:
     SkGlyphCache_Globals() {
+
         fHead = nullptr;
-        fTotalMemoryUsed = 0;
+        fTotalMemoryUsed.store(0);
         fCacheSizeLimit = SK_DEFAULT_FONT_CACHE_LIMIT;
         fCacheCount = 0;
         fCacheCountLimit = SK_DEFAULT_FONT_CACHE_COUNT_LIMIT;
@@ -47,7 +48,8 @@ public:
     SkGlyphCache* internalGetHead() const { return fHead; }
     SkGlyphCache* internalGetTail() const;
 
-    size_t getTotalMemoryUsed() const { return fTotalMemoryUsed; }
+    size_t getTotalMemoryUsed() const { return fTotalMemoryUsed.load(); }
+    void increaseTotalMemoryUsed(size_t increase) { fTotalMemoryUsed.fetch_add(increase);}
     int getCacheCountUsed() const { return fCacheCount; }
 
 #ifdef SK_DEBUG
@@ -66,7 +68,7 @@ public:
     // or count limit.
     bool isOverBudget() const {
         return fCacheCount > fCacheCountLimit ||
-               fTotalMemoryUsed > fCacheSizeLimit;
+               fTotalMemoryUsed.load() > fCacheSizeLimit;
     }
 
     void purgeAll(); // does not change budget
@@ -75,20 +77,21 @@ public:
     void attachCacheToHead(SkGlyphCache*);
 
     // can only be called when the mutex is already held
-    void internalDetachCache(SkGlyphCache*);
-    void internalAttachCacheToHead(SkGlyphCache*);
-
-private:
-    SkGlyphCache* fHead;
-    size_t        fTotalMemoryUsed;
-    size_t        fCacheSizeLimit;
-    int32_t       fCacheCountLimit;
-    int32_t       fCacheCount;
+    void internalMoveToHead(SkGlyphCache *);
 
     // Checkout budgets, modulated by the specified min-bytes-needed-to-purge,
     // and attempt to purge caches to match.
     // Returns number of bytes freed.
+    void internalDetachCache(SkGlyphCache* cache);
     size_t internalPurge(size_t minBytesNeeded = 0);
 
+private:
+    SkGlyphCache*    fHead;
+    SkAtomic<size_t> fTotalMemoryUsed;
+    size_t           fCacheSizeLimit;
+    int32_t          fCacheCountLimit;
+    int32_t          fCacheCount;
+
 };
 
 #endif