Add more thread annotations to SkStrike

* remove internalGetHead/Tail
* make validate private because fLock must be held.

Change-Id: If998cfe8ba0414e4520807034f841e40b94aa206
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/260284
Reviewed-by: Mike Klein <mtklein@google.com>
Commit-Queue: Herb Derby <herb@google.com>
Author: Herb Derby
Date: 2019-12-16 12:19:03 -05:00
Committed-by: Skia Commit-Bot
Parent: abb92de1c2
Commit: fdb2b7d530
2 changed files with 26 additions and 20 deletions
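
For context, the SK_GUARDED_BY, SK_REQUIRES, and SK_EXCLUDES annotations added below wrap Clang's -Wthread-safety attributes. Here is a rough, self-contained sketch of what those attributes enforce, using the raw Clang attributes and made-up names rather than Skia's SK_* macros and SkSpinlock:

    // Sketch only; compile with: clang++ -c -Wthread-safety sketch.cpp
    #include <mutex>

    #define CAPABILITY(x)  __attribute__((capability(x)))
    #define GUARDED_BY(x)  __attribute__((guarded_by(x)))
    #define REQUIRES(...)  __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...)  __attribute__((locks_excluded(__VA_ARGS__)))
    #define ACQUIRE(...)   __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)   __attribute__((release_capability(__VA_ARGS__)))

    // A lock type the analysis understands; std::mutex does the real locking.
    class CAPABILITY("mutex") Mutex {
    public:
        void lock()   ACQUIRE() { fImpl.lock(); }
        void unlock() RELEASE() { fImpl.unlock(); }
    private:
        std::mutex fImpl;
    };

    class Cache {
    public:
        void add(int bytes) EXCLUDES(fLock) {        // caller must NOT already hold fLock
            fLock.lock();
            this->addLocked(bytes);
            fLock.unlock();
        }
    private:
        void addLocked(int bytes) REQUIRES(fLock) {  // caller MUST hold fLock
            fTotalBytes += bytes;                    // ok: guarded field, lock is held
        }

        mutable Mutex fLock;
        int fTotalBytes GUARDED_BY(fLock) = 0;       // unguarded access warns at compile time
    };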


@@ -269,7 +269,7 @@ SkExclusiveStrikePtr SkStrikeCache::findStrikeExclusive(const SkDescriptor& desc
 auto SkStrikeCache::findAndDetachStrike(const SkDescriptor& desc) -> Node* {
     SkAutoSpinlock ac(fLock);
-    for (Node* node = internalGetHead(); node != nullptr; node = node->fNext) {
+    for (Node* node = fHead; node != nullptr; node = node->fNext) {
         if (node->fStrike.getDescriptor() == desc) {
             this->internalDetachCache(node);
             return node;
@@ -311,7 +311,7 @@ bool SkStrikeCache::desperationSearchForImage(const SkDescriptor& desc, SkGlyph*
     SkAutoSpinlock ac(fLock);
     SkGlyphID glyphID = glyph->getGlyphID();
-    for (Node* node = internalGetHead(); node != nullptr; node = node->fNext) {
+    for (Node* node = fHead; node != nullptr; node = node->fNext) {
         if (loose_compare(node->fStrike.getDescriptor(), desc)) {
             if (SkGlyph *fallback = node->fStrike.glyphOrNull(glyph->getPackedID())) {
                 // This desperate-match node may disappear as soon as we drop fLock, so we
@@ -342,7 +342,7 @@ bool SkStrikeCache::desperationSearchForPath(
     //
     // This will have to search the sub-pixel positions too.
     // There is also a problem with accounting for cache size with shared path data.
-    for (Node* node = internalGetHead(); node != nullptr; node = node->fNext) {
+    for (Node* node = fHead; node != nullptr; node = node->fNext) {
         if (loose_compare(node->fStrike.getDescriptor(), desc)) {
             if (SkGlyph *from = node->fStrike.glyphOrNull(SkPackedGlyphID{glyphID})) {
                 if (from->setPathHasBeenCalled() && from->path() != nullptr) {
@@ -456,7 +456,7 @@ void SkStrikeCache::forEachStrike(std::function<void(const SkStrike&)> visitor)
     this->validate();
-    for (Node* node = this->internalGetHead(); node != nullptr; node = node->fNext) {
+    for (Node* node = fHead; node != nullptr; node = node->fNext) {
         visitor(node->fStrike);
     }
 }
@@ -491,7 +491,7 @@ size_t SkStrikeCache::internalPurge(size_t minBytesNeeded) {
     // Start at the tail and proceed backwards deleting; the list is in LRU
     // order, with unimportant entries at the tail.
-    Node* node = this->internalGetTail();
+    Node* node = fTail;
     while (node != nullptr && (bytesFreed < bytesNeeded || countFreed < countNeeded)) {
         Node* prev = node->fPrev;
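
For context on the loop that starts here: the cache list is kept in LRU order with the least-recently-used strikes at the tail, so the purge walks backwards from fTail. A hedged sketch of that shape (the getMemoryUsed() helper and the bytes/count bookkeeping names are illustrative, not the exact internalPurge body):

    // Free least-recently-used entries until enough bytes and entries are freed.
    // Must run with fLock already held.
    Node* node = fTail;                               // least-recently-used entry
    while (node != nullptr && (bytesFreed < bytesNeeded || countFreed < countNeeded)) {
        Node* prev = node->fPrev;                     // grab the link before unlinking
        bytesFreed += node->fStrike.getMemoryUsed();  // illustrative size query
        countFreed += 1;
        this->internalDetachCache(node);              // unlink while fLock is held
        delete node;
        node = prev;                                  // continue toward more recently used entries
    }
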
@@ -577,10 +577,15 @@ void SkStrikeCache::validate() const {
         node = node->fNext;
     }
-    SkASSERTF(fCacheCount == computedCount, "fCacheCount: %d, computedCount: %d", fCacheCount,
-              computedCount);
-    SkASSERTF(fTotalMemoryUsed == computedBytes, "fTotalMemoryUsed: %d, computedBytes: %d",
-              fTotalMemoryUsed, computedBytes);
+    // Can't use SkASSERTF because it loses thread annotations.
+    if (fCacheCount != computedCount) {
+        SkDebugf("fCacheCount: %d, computedCount: %d", fCacheCount, computedCount);
+        SK_ABORT("fCacheCount != computedCount");
+    }
+    if (fTotalMemoryUsed != computedBytes) {
+        SkDebugf("fTotalMemoryUsed: %zu, computedBytes: %zu", fTotalMemoryUsed, computedBytes);
+        SK_ABORT("fTotalMemoryUsed != computedBytes");
+    }
 }
 #endif
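
The SkAutoSpinlock guards in the hunks above are RAII lock holders; for Clang's analysis to credit the lock they take, such a guard is itself annotated as a scoped capability. A minimal sketch of that pattern, reusing the illustrative Mutex and attribute macros from the sketch near the top of this page (this is not Skia's actual SkAutoSpinlock):

    #define SCOPED_CAPABILITY __attribute__((scoped_lockable))

    class SCOPED_CAPABILITY AutoLock {
    public:
        explicit AutoLock(Mutex& m) ACQUIRE(m) : fMutex(m) { fMutex.lock(); }
        ~AutoLock() RELEASE() { fMutex.unlock(); }
    private:
        Mutex& fMutex;
    };

    // Usage inside a Cache member function:
    //     AutoLock lock(fLock);      // fLock is held until 'lock' goes out of scope
    //     this->addLocked(12);       // ok: REQUIRES(fLock) is satisfied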


@@ -116,19 +116,22 @@ public:
     int getCachePointSizeLimit() const;
     int setCachePointSizeLimit(int limit);
 #ifdef SK_DEBUG
-    // A simple accounting of what each glyph cache reports and the strike cache total.
-    void validate() const SK_REQUIRES(fLock);
     // Make sure that each glyph cache's memory tracking and actual memory used are in sync.
     void validateGlyphCacheDataSize() const;
 #else
-    void validate() const {}
     void validateGlyphCacheDataSize() const {}
 #endif
 private:
-    Node* findAndDetachStrike(const SkDescriptor&);
+#ifdef SK_DEBUG
+    // A simple accounting of what each glyph cache reports and the strike cache total.
+    void validate() const SK_REQUIRES(fLock);
+#else
+    void validate() const {}
+#endif
+    Node* findAndDetachStrike(const SkDescriptor&) SK_EXCLUDES(fLock);
     Node* createStrike(
             const SkDescriptor& desc,
             std::unique_ptr<SkScalerContext> scaler,
@@ -137,12 +140,10 @@ private:
     Node* findOrCreateStrike(
             const SkDescriptor& desc,
             const SkScalerContextEffects& effects,
-            const SkTypeface& typeface);
-    void attachNode(Node* node);
+            const SkTypeface& typeface) SK_EXCLUDES(fLock);
+    void attachNode(Node* node) SK_EXCLUDES(fLock);
     // The following methods can only be called when mutex is already held.
-    Node* internalGetHead() const SK_REQUIRES(fLock) { return fHead; }
-    Node* internalGetTail() const SK_REQUIRES(fLock) { return fTail; }
     void internalDetachCache(Node*) SK_REQUIRES(fLock);
     void internalAttachToHead(Node*) SK_REQUIRES(fLock);
@@ -156,10 +157,10 @@ private:
     mutable SkSpinlock fLock;
     Node* fHead SK_GUARDED_BY(fLock) {nullptr};
     Node* fTail SK_GUARDED_BY(fLock) {nullptr};
-    size_t fTotalMemoryUsed{0};
     size_t fCacheSizeLimit{SK_DEFAULT_FONT_CACHE_LIMIT};
+    size_t fTotalMemoryUsed SK_GUARDED_BY(fLock) {0};
     int32_t fCacheCountLimit{SK_DEFAULT_FONT_CACHE_COUNT_LIMIT};
-    int32_t fCacheCount{0};
+    int32_t fCacheCount SK_GUARDED_BY(fLock) {0};
     int32_t fPointSizeLimit{SK_DEFAULT_FONT_CACHE_POINT_SIZE_LIMIT};
 };
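
With fHead, fTail, fTotalMemoryUsed, and fCacheCount all marked SK_GUARDED_BY(fLock), any read or write of those members outside a function annotated SK_REQUIRES(fLock), or outside a live lock guard, is reported at compile time under -Wthread-safety. A hedged sketch of the kind of mistake this catches, again using the illustrative Mutex and AutoLock from the earlier sketches rather than Skia's types:

    class Totals {
    public:
        int totalBytes() const EXCLUDES(fLock) {
            AutoLock lock(fLock);   // ok: the guard holds fLock for this scope
            return fTotalBytes;
        }

        int totalBytesRacy() const {
            return fTotalBytes;     // -Wthread-safety: reading fTotalBytes requires holding fLock
        }

    private:
        mutable Mutex fLock;
        int fTotalBytes GUARDED_BY(fLock) = 0;
    };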