Multi-threaded strike cache
Allow multiple threads to share the same strike. The old system of detaching a strike from the linked list while it was in use is gone; strikes now stay in the list, where other threads can find them.

* Removed strike size verification. There was no way to make the locking work out, since the whole point of the change is to have multiple threads mutating the structure at the same time.
* Strikes are now ref-counted instead of being checked out. As a result, ExclusiveStrikePtr now just wraps an sk_sp and should be renamed in a future CL.

Change-Id: I832642332a3106e30745f9cdd3156ae72d41fd0b
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/272057
Reviewed-by: Ben Wagner <bungeman@google.com>
Commit-Queue: Herb Derby <herb@google.com>
parent 7b0ed557a9
commit a6cd7c0b1f
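Before the diff, a minimal sketch of the sharing model the commit message describes: a strike stays in the cache's list and every user holds a counted reference, so two threads asking for the same descriptor end up on the same object instead of one thread checking it out. The names ToyStrike, ToyCache, and findOrCreate are illustrative, and std::shared_ptr plus std::vector stand in for Skia's sk_sp and intrusive doubly-linked list; this is not the CL's code.

#include <cassert>
#include <memory>
#include <mutex>
#include <vector>

struct ToyStrike {
    explicit ToyStrike(int descriptor) : desc(descriptor) {}
    int desc;
    std::mutex mu;  // per-strike lock; glyph mutation happens under this
};

class ToyCache {
public:
    // Returns a shared handle. The strike also stays in fStrikes, so another
    // thread asking for the same descriptor gets the same object.
    std::shared_ptr<ToyStrike> findOrCreate(int desc) {
        std::lock_guard<std::mutex> lock(fLock);
        for (const auto& s : fStrikes) {
            if (s->desc == desc) { return s; }
        }
        fStrikes.push_back(std::make_shared<ToyStrike>(desc));
        return fStrikes.back();
    }

private:
    std::mutex fLock;  // guards only the list, not the strikes' contents
    std::vector<std::shared_ptr<ToyStrike>> fStrikes;
};

int main() {
    ToyCache cache;
    auto a = cache.findOrCreate(42);  // e.g. thread A
    auto b = cache.findOrCreate(42);  // e.g. thread B
    assert(a == b);                   // both threads share one strike
    return 0;
}

Purging can then drop the cache's reference while any in-flight users keep the strike alive through their own references.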
@@ -237,6 +237,7 @@ size_t SkScalerCache::prepareForPathDrawing(

void SkScalerCache::findIntercepts(const SkScalar bounds[2], SkScalar scale, SkScalar xPos,
SkGlyph* glyph, SkScalar* array, int* count) {
SkAutoMutexExclusive lock{fMu};
glyph->ensureIntercepts(bounds, scale, xPos, array, count, &fAlloc);
}

@@ -258,21 +259,3 @@ void SkScalerCache::dump() const {
SkDebugf("%s\n", msg.c_str());
}

#ifdef SK_DEBUG
size_t SkScalerCache::recalculateMemoryUsed() const {
SkAutoMutexExclusive lock{fMu};
size_t memoryUsed = sizeof(*this);
fGlyphMap.foreach ([&memoryUsed](const SkGlyph* glyphPtr) {
memoryUsed += sizeof(SkGlyph);
if (glyphPtr->setImageHasBeenCalled()) {
memoryUsed += glyphPtr->imageSize();
}
if (glyphPtr->setPathHasBeenCalled() && glyphPtr->path() != nullptr) {
memoryUsed += glyphPtr->path()->approximateBytesUsed();
}
});
return memoryUsed;
}
#endif  // SK_DEBUG

@@ -85,10 +85,6 @@ public:

SkScalerContext* getScalerContext() const { return fScalerContext.get(); }

#ifdef SK_DEBUG
size_t recalculateMemoryUsed() const SK_EXCLUDES(fMu);
#endif

private:
class GlyphMapHashTraits {
public:
@@ -31,35 +31,25 @@ SkStrikeCache* SkStrikeCache::GlobalStrikeCache() {
return cache;
}

SkStrikeCache::ExclusiveStrikePtr::ExclusiveStrikePtr(SkStrikeCache::Strike* strike)
: fStrike{strike} {}
SkStrikeCache::ExclusiveStrikePtr::ExclusiveStrikePtr(sk_sp<Strike> strike)
: fStrike{std::move(strike)} {}

SkStrikeCache::ExclusiveStrikePtr::ExclusiveStrikePtr()
: fStrike{nullptr} {}

SkStrikeCache::ExclusiveStrikePtr::ExclusiveStrikePtr(ExclusiveStrikePtr&& o)
: fStrike{o.fStrike} {
: fStrike{std::move(o.fStrike)} {
o.fStrike = nullptr;
}

SkStrikeCache::ExclusiveStrikePtr&
SkStrikeCache::ExclusiveStrikePtr::operator = (ExclusiveStrikePtr&& o) {
if (fStrike != nullptr) {
fStrike->fStrikeCache->attachStrike(fStrike);
}
fStrike = o.fStrike;
o.fStrike = nullptr;
SkStrikeCache::ExclusiveStrikePtr::operator = (ExclusiveStrikePtr&& that) {
fStrike = std::move(that.fStrike);
return *this;
}

SkStrikeCache::ExclusiveStrikePtr::~ExclusiveStrikePtr() {
if (fStrike != nullptr) {
fStrike->fStrikeCache->attachStrike(fStrike);
}
}

SkStrike* SkStrikeCache::ExclusiveStrikePtr::get() const {
return fStrike;
return fStrike.get();
}

SkStrike* SkStrikeCache::ExclusiveStrikePtr::operator -> () const {
@@ -104,8 +94,8 @@ SkExclusiveStrikePtr SkStrikeCache::findOrCreateStrikeExclusive(

auto SkStrikeCache::findOrCreateStrike(const SkDescriptor& desc,
const SkScalerContextEffects& effects,
const SkTypeface& typeface) -> Strike* {
Strike* strike = this->findAndDetachStrike(desc);
const SkTypeface& typeface) -> sk_sp<Strike> {
sk_sp<Strike> strike = this->findStrikeOrNull(desc);
if (strike == nullptr) {
auto scaler = typeface.createScalerContext(effects, &desc);
strike = this->createStrike(desc, std::move(scaler));

@@ -116,7 +106,7 @@ auto SkStrikeCache::findOrCreateStrike(const SkDescriptor& desc,
SkScopedStrikeForGPU SkStrikeCache::findOrCreateScopedStrike(const SkDescriptor& desc,
const SkScalerContextEffects& effects,
const SkTypeface& typeface) {
return SkScopedStrikeForGPU{this->findOrCreateStrike(desc, effects, typeface)};
return SkScopedStrikeForGPU{this->findOrCreateStrike(desc, effects, typeface).release()};
}

void SkStrikeCache::PurgeAll() {
@@ -188,30 +178,30 @@ void SkStrikeCache::DumpMemoryStatistics(SkTraceMemoryDump* dump) {
GlobalStrikeCache()->forEachStrike(visitor);
}


void SkStrikeCache::attachStrike(Strike* strike) {
if (strike == nullptr) {
return;
}
SkAutoSpinlock ac(fLock);

this->validate();

this->internalAttachToHead(strike);
this->internalPurge();
}

SkExclusiveStrikePtr SkStrikeCache::findStrikeExclusive(const SkDescriptor& desc) {
return SkExclusiveStrikePtr(this->findAndDetachStrike(desc));
return SkExclusiveStrikePtr(this->findStrikeOrNull(desc));
}

auto SkStrikeCache::findAndDetachStrike(const SkDescriptor& desc) -> Strike* {
auto SkStrikeCache::findStrikeOrNull(const SkDescriptor& desc) -> sk_sp<Strike> {
SkAutoSpinlock ac(fLock);

for (Strike* strike = fHead; strike != nullptr; strike = strike->fNext) {
if (strike->fScalerCache.getDescriptor() == desc) {
this->internalDetachStrike(strike);
return strike;
if (fHead != strike) {
// Make most recently used
strike->fPrev->fNext = strike->fNext;
if (strike->fNext != nullptr) {
strike->fNext->fPrev = strike->fPrev;
} else {
fTail = strike->fPrev;
}
fHead->fPrev = strike;
strike->fNext = fHead;
strike->fPrev = nullptr;
fHead = strike;
}

return sk_ref_sp(strike);
}
}

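The loop above now leaves a hit in the list and just moves it to the head, so the least recently used strikes collect at the tail where purging trims them. A minimal sketch of that move-to-front step on a toy doubly-linked list (Node, head, and tail are illustrative names, not Skia's types):

struct Node {
    Node* prev = nullptr;
    Node* next = nullptr;
};

// Unlink n from its current position and splice it in at the head.
// Assumes n is already somewhere in the list.
void moveToFront(Node*& head, Node*& tail, Node* n) {
    if (head == n) { return; }      // already most recently used
    n->prev->next = n->next;        // n is not the head, so prev exists
    if (n->next != nullptr) {
        n->next->prev = n->prev;
    } else {
        tail = n->prev;             // n was the tail
    }
    head->prev = n;                 // splice in at the head
    n->next = head;
    n->prev = nullptr;
    head = n;
}

In the CL the same pointer surgery runs on Strike::fPrev and Strike::fNext while the cache spinlock is held, and the hit is returned as an additional reference via sk_ref_sp.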
@@ -232,8 +222,12 @@ auto SkStrikeCache::createStrike(
const SkDescriptor& desc,
std::unique_ptr<SkScalerContext> scaler,
SkFontMetrics* maybeMetrics,
std::unique_ptr<SkStrikePinner> pinner) -> Strike* {
return new Strike{this, desc, std::move(scaler), maybeMetrics, std::move(pinner)};
std::unique_ptr<SkStrikePinner> pinner) -> sk_sp<Strike> {
auto strike =
sk_make_sp<Strike>(this, desc, std::move(scaler), maybeMetrics, std::move(pinner));
SkAutoSpinlock lock{fLock};
this->internalAttachToHead(strike);
return strike;
}

void SkStrikeCache::purgeAll() {

@@ -353,8 +347,7 @@ size_t SkStrikeCache::internalPurge(size_t minBytesNeeded) {
if (strike->fPinner == nullptr || strike->fPinner->canDelete()) {
bytesFreed += strike->fMemoryUsed;
countFreed += 1;
this->internalDetachStrike(strike);
strike->unref();
this->internalRemoveStrike(strike);
}
strike = prev;
}
@@ -371,23 +364,25 @@ size_t SkStrikeCache::internalPurge(size_t minBytesNeeded) {
return bytesFreed;
}

void SkStrikeCache::internalAttachToHead(Strike* strike) {
void SkStrikeCache::internalAttachToHead(sk_sp<Strike> strike) {
SkASSERT(nullptr == strike->fPrev && nullptr == strike->fNext);
if (fHead) {
fHead->fPrev = strike;
strike->fNext = fHead;
}
fHead = strike;

if (fTail == nullptr) {
fTail = strike;
}

fCacheCount += 1;
fTotalMemoryUsed += strike->fMemoryUsed;

if (fHead) {
fHead->fPrev = strike.get();
strike->fNext = fHead;
}

if (fTail == nullptr) {
fTail = strike.get();
}

fHead = strike.release(); // Transfer ownership of strike to the cache list.
}

void SkStrikeCache::internalDetachStrike(Strike* strike) {
void SkStrikeCache::internalRemoveStrike(Strike* strike) {
SkASSERT(fCacheCount > 0);
fCacheCount -= 1;
fTotalMemoryUsed -= strike->fMemoryUsed;

@@ -403,23 +398,10 @@ void SkStrikeCache::internalDetachStrike(Strike* strike) {
fTail = strike->fPrev;
}
strike->fPrev = strike->fNext = nullptr;
strike->fStrikeCache = nullptr;
strike->unref();
}

void SkStrikeCache::ValidateGlyphCacheDataSize() {
#ifdef SK_DEBUG
GlobalStrikeCache()->validateGlyphCacheDataSize();
#endif
}

#ifdef SK_DEBUG
void SkStrikeCache::validateGlyphCacheDataSize() const {
this->forEachStrike(
[](const Strike& strike) {
SkASSERT(strike.fMemoryUsed == strike.fScalerCache.recalculateMemoryUsed());
});
}
#endif

#ifdef SK_DEBUG
void SkStrikeCache::validate() const {
size_t computedBytes = 0;
@@ -55,16 +55,14 @@ public:
, fPinner{std::move(pinner)} {}

SkGlyph* mergeGlyphAndImage(SkPackedGlyphID toID, const SkGlyph& from) {
auto [glyph, delta] = fScalerCache.mergeGlyphAndImage(toID, from);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
auto [glyph, increase] = fScalerCache.mergeGlyphAndImage(toID, from);
this->updateDelta(increase);
return glyph;
}

const SkPath* mergePath(SkGlyph* glyph, const SkPath* path) {
auto [glyphPath, delta] = fScalerCache.mergePath(glyph, path);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
auto [glyphPath, increase] = fScalerCache.mergePath(glyph, path);
this->updateDelta(increase);
return glyphPath;
}

@@ -83,32 +81,28 @@ public:

SkSpan<const SkGlyph*> metrics(SkSpan<const SkGlyphID> glyphIDs,
const SkGlyph* results[]) {
auto [glyphs, delta] = fScalerCache.metrics(glyphIDs, results);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
auto [glyphs, increase] = fScalerCache.metrics(glyphIDs, results);
this->updateDelta(increase);
return glyphs;
}

SkSpan<const SkGlyph*> preparePaths(SkSpan<const SkGlyphID> glyphIDs,
const SkGlyph* results[]) {
auto [glyphs, delta] = fScalerCache.preparePaths(glyphIDs, results);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
auto [glyphs, increase] = fScalerCache.preparePaths(glyphIDs, results);
this->updateDelta(increase);
return glyphs;
}

SkSpan<const SkGlyph*> prepareImages(SkSpan<const SkPackedGlyphID> glyphIDs,
const SkGlyph* results[]) {
auto [glyphs, delta] = fScalerCache.prepareImages(glyphIDs, results);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
auto [glyphs, increase] = fScalerCache.prepareImages(glyphIDs, results);
this->updateDelta(increase);
return glyphs;
}

void prepareForDrawingMasksCPU(SkDrawableGlyphBuffer* drawables) {
size_t delta = fScalerCache.prepareForDrawingMasksCPU(drawables);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
size_t increase = fScalerCache.prepareForDrawingMasksCPU(drawables);
this->updateDelta(increase);
}

const SkGlyphPositionRoundingSpec& roundingSpec() const override {
@@ -121,30 +115,37 @@ public:

void prepareForMaskDrawing(
SkDrawableGlyphBuffer* drawbles, SkSourceGlyphBuffer* rejects) override {
size_t delta = fScalerCache.prepareForMaskDrawing(drawbles, rejects);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
size_t increase = fScalerCache.prepareForMaskDrawing(drawbles, rejects);
this->updateDelta(increase);
}

void prepareForSDFTDrawing(
SkDrawableGlyphBuffer* drawbles, SkSourceGlyphBuffer* rejects) override {
size_t delta = fScalerCache.prepareForSDFTDrawing(drawbles, rejects);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
size_t increase = fScalerCache.prepareForSDFTDrawing(drawbles, rejects);
this->updateDelta(increase);
}

void prepareForPathDrawing(
SkDrawableGlyphBuffer* drawbles, SkSourceGlyphBuffer* rejects) override {
size_t delta = fScalerCache.prepareForPathDrawing(drawbles, rejects);
fMemoryUsed += delta;
SkASSERT(fScalerCache.recalculateMemoryUsed() == fMemoryUsed);
size_t increase = fScalerCache.prepareForPathDrawing(drawbles, rejects);
this->updateDelta(increase);
}

void onAboutToExitScope() override {
fStrikeCache->attachStrike(this);
this->unref();
}

SkStrikeCache* const fStrikeCache;
void updateDelta(size_t increase) {
if (increase != 0) {
SkAutoSpinlock lock{fStrikeCache->fLock};
fMemoryUsed += increase;
if (fStrikeCache != nullptr) {
fStrikeCache->fTotalMemoryUsed += increase;
}
}
}

SkStrikeCache* fStrikeCache{nullptr};
Strike* fNext{nullptr};
Strike* fPrev{nullptr};
SkScalerCache fScalerCache;
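The new updateDelta helper above keeps the per-strike byte count and the cache-wide total in step: growth is applied under the cache's lock, and the lock is skipped entirely when nothing grew. A minimal sketch of the same pattern, with ToyCache and ToyStrike as illustrative names and a std::mutex standing in for the spinlock (it also assumes the cache pointer is non-null, which the real code checks):

#include <cstddef>
#include <mutex>

struct ToyCache {
    std::mutex lock;
    size_t totalMemoryUsed = 0;  // drives purge decisions for the whole cache
};

struct ToyStrike {
    ToyCache* cache = nullptr;
    size_t memoryUsed = 0;       // this strike's share of the total

    void updateDelta(size_t increase) {
        if (increase == 0) { return; }       // skip the lock on the common path
        std::lock_guard<std::mutex> guard(cache->lock);
        memoryUsed += increase;              // per-strike accounting
        cache->totalMemoryUsed += increase;  // cache-wide budget
    }
};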
@@ -154,13 +155,12 @@ public:

class ExclusiveStrikePtr {
public:
explicit ExclusiveStrikePtr(Strike*);
explicit ExclusiveStrikePtr(sk_sp<Strike> strike);
ExclusiveStrikePtr();
ExclusiveStrikePtr(const ExclusiveStrikePtr&) = delete;
ExclusiveStrikePtr& operator = (const ExclusiveStrikePtr&) = delete;
ExclusiveStrikePtr(ExclusiveStrikePtr&&);
ExclusiveStrikePtr& operator = (ExclusiveStrikePtr&&);
~ExclusiveStrikePtr();

Strike* get() const;
Strike* operator -> () const;

@@ -171,7 +171,7 @@ public:
friend bool operator == (decltype(nullptr), const ExclusiveStrikePtr&);

private:
Strike* fStrike;
sk_sp<Strike> fStrike;
};

static SkStrikeCache* GlobalStrikeCache();

@@ -194,7 +194,6 @@ public:
const SkTypeface& typeface) override;

static void PurgeAll();
static void ValidateGlyphCacheDataSize();
static void Dump();

// Dump memory usage statistics of all the attaches caches in the process using the

@@ -213,12 +212,6 @@ public:

int getCachePointSizeLimit() const;
int setCachePointSizeLimit(int limit);
#ifdef SK_DEBUG
// Make sure that each glyph cache's memory tracking and actual memory used are in sync.
void validateGlyphCacheDataSize() const;
#else
void validateGlyphCacheDataSize() const {}
#endif

private:
#ifdef SK_DEBUG

@@ -228,21 +221,20 @@ private:
void validate() const {}
#endif

Strike* findAndDetachStrike(const SkDescriptor&) SK_EXCLUDES(fLock);
Strike* createStrike(
sk_sp<Strike> findStrikeOrNull(const SkDescriptor& desc) SK_EXCLUDES(fLock);
sk_sp<Strike> createStrike(
const SkDescriptor& desc,
std::unique_ptr<SkScalerContext> scaler,
SkFontMetrics* maybeMetrics = nullptr,
std::unique_ptr<SkStrikePinner> = nullptr);
Strike* findOrCreateStrike(
std::unique_ptr<SkStrikePinner> = nullptr) SK_EXCLUDES(fLock);
sk_sp<Strike> findOrCreateStrike(
const SkDescriptor& desc,
const SkScalerContextEffects& effects,
const SkTypeface& typeface) SK_EXCLUDES(fLock);
void attachStrike(Strike* strike) SK_EXCLUDES(fLock);

// The following methods can only be called when mutex is already held.
void internalDetachStrike(Strike* strike) SK_REQUIRES(fLock);
void internalAttachToHead(Strike* strike) SK_REQUIRES(fLock);
void internalRemoveStrike(Strike* strike) SK_REQUIRES(fLock);
void internalAttachToHead(sk_sp<Strike> strike) SK_REQUIRES(fLock);

// Checkout budgets, modulated by the specified min-bytes-needed-to-purge,
// and attempt to purge caches to match.
@@ -403,7 +403,6 @@ DEF_TEST(SkRemoteGlyphCache_ClientMemoryAccounting, reporter) {
// Client.
REPORTER_ASSERT(reporter,
client.readStrikeData(serverStrikeData.data(), serverStrikeData.size()));
SkStrikeCache::ValidateGlyphCacheDataSize();

// Must unlock everything on termination, otherwise valgrind complains about memory leaks.
discardableManager->unlockAndDeleteAll();

@@ -487,7 +486,6 @@ DEF_GPUTEST_FOR_RENDERING_CONTEXTS(SkRemoteGlyphCache_DrawTextAsPath, reporter,
SkBitmap actual = RasterBlob(clientBlob, 10, 10, paint, ctxInfo.grContext());
compare_blobs(expected, actual, reporter, 1);
REPORTER_ASSERT(reporter, !discardableManager->hasCacheMiss());
SkStrikeCache::ValidateGlyphCacheDataSize();

// Must unlock everything on termination, otherwise valgrind complains about memory leaks.
discardableManager->unlockAndDeleteAll();

@@ -564,7 +562,6 @@ DEF_GPUTEST_FOR_RENDERING_CONTEXTS(SkRemoteGlyphCache_DrawTextAsMaskWithPathFall
SkBitmap actual = RasterBlob(clientBlob, 10, 10, paint, ctxInfo.grContext());
compare_blobs(expected, actual, reporter);
REPORTER_ASSERT(reporter, !discardableManager->hasCacheMiss());
SkStrikeCache::ValidateGlyphCacheDataSize();

// Must unlock everything on termination, otherwise valgrind complains about memory leaks.
discardableManager->unlockAndDeleteAll();

@@ -639,7 +636,6 @@ DEF_GPUTEST_FOR_RENDERING_CONTEXTS(SkRemoteGlyphCache_DrawTextAsSDFTWithAllARGBF
// interpolation.
compare_blobs(expected, actual, reporter, 36);
REPORTER_ASSERT(reporter, !discardableManager->hasCacheMiss());
SkStrikeCache::ValidateGlyphCacheDataSize();

// Must unlock everything on termination, otherwise valgrind complains about memory leaks.
discardableManager->unlockAndDeleteAll();

@@ -677,7 +673,6 @@ DEF_GPUTEST_FOR_RENDERING_CONTEXTS(SkRemoteGlyphCache_DrawTextXY, reporter, ctxI
SkBitmap actual = RasterBlob(clientBlob, 10, 10, paint, ctxInfo.grContext(), nullptr, 0.5);
compare_blobs(expected, actual, reporter);
REPORTER_ASSERT(reporter, !discardableManager->hasCacheMiss());
SkStrikeCache::ValidateGlyphCacheDataSize();

// Must unlock everything on termination, otherwise valgrind complains about memory leaks.
discardableManager->unlockAndDeleteAll();

@@ -723,7 +718,6 @@ DEF_GPUTEST_FOR_RENDERING_CONTEXTS(SkRemoteGlyphCache_DrawTextAsDFT, reporter, c
SkBitmap actual = RasterBlob(clientBlob, 10, 10, paint, ctxInfo.grContext(), &matrix);
compare_blobs(expected, actual, reporter);
REPORTER_ASSERT(reporter, !discardableManager->hasCacheMiss());
SkStrikeCache::ValidateGlyphCacheDataSize();

// Must unlock everything on termination, otherwise valgrind complains about memory leaks.
discardableManager->unlockAndDeleteAll();

@@ -813,7 +807,6 @@ DEF_GPUTEST_FOR_RENDERING_CONTEXTS(SkRemoteGlyphCache_TypefaceWithNoPaths, repor

RasterBlob(clientBlob, 500, 500, paint, ctxInfo.grContext());
REPORTER_ASSERT(reporter, !discardableManager->hasCacheMiss());
SkStrikeCache::ValidateGlyphCacheDataSize();
discardableManager->resetCacheMissCounts();
}