/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
|
2014-09-16 17:39:55 +00:00
|
|
|
|
2019-04-23 17:05:21 +00:00
|
|
|
#include "include/core/SkCanvas.h"
|
|
|
|
#include "include/core/SkGraphics.h"
|
|
|
|
#include "include/core/SkPicture.h"
|
|
|
|
#include "include/core/SkPictureRecorder.h"
|
|
|
|
#include "include/core/SkSurface.h"
|
|
|
|
#include "src/core/SkBitmapCache.h"
|
2020-07-14 21:16:32 +00:00
|
|
|
#include "src/core/SkMipmap.h"
|
2019-04-23 17:05:21 +00:00
|
|
|
#include "src/core/SkResourceCache.h"
|
2019-08-01 19:35:20 +00:00
|
|
|
#include "src/image/SkImage_Base.h"
|
2019-04-23 17:05:21 +00:00
|
|
|
#include "src/lazy/SkDiscardableMemoryPool.h"
|
|
|
|
#include "tests/Test.h"
|
2014-07-17 13:58:01 +00:00
|
|
|
|
2014-09-16 17:39:55 +00:00
|
|
|
////////////////////////////////////////////////////////////////////////////////////////
|
2014-09-04 17:50:53 +00:00
|
|
|
|
2014-10-08 12:17:12 +00:00
|
|
|
// Expected lock state of an SkCachedData entry: "locked" means its data()
// pointer is currently mapped (non-null). See check_data().
enum LockedState {
    kNotLocked,
    kLocked,
};
|
|
|
|
|
|
|
|
// Expected cache residency of an SkCachedData entry. See check_data().
enum CachedState {
    kNotInCache,
    kInCache,
};
|
|
|
|
|
|
|
|
// Asserts that 'data' has the expected ref count, cache residency, and lock
// state. An entry counts as locked when its data() pointer is non-null.
static void check_data(skiatest::Reporter* reporter, const SkCachedData* data,
                       int refcnt, CachedState cacheState, LockedState lockedState) {
    REPORTER_ASSERT(reporter, refcnt == data->testing_only_getRefCnt());

    const bool expectInCache = (cacheState == kInCache);
    REPORTER_ASSERT(reporter, expectInCache == data->testing_only_isInCache());

    const bool expectLocked = (lockedState == kLocked);
    REPORTER_ASSERT(reporter, expectLocked == (data->data() != nullptr));
}
|
|
|
|
|
|
|
|
// Exercises SkMipmapCache add/find/purge against a caller-supplied (local)
// cache, checking ref-count and lock-state transitions at each step.
static void test_mipmapcache(skiatest::Reporter* reporter, SkResourceCache* cache) {
    cache->purgeAll();

    SkBitmap src;
    src.allocN32Pixels(5, 5);
    src.setImmutable();
    sk_sp<SkImage> img = src.asImage();
    const auto desc = SkBitmapCacheDesc::Make(img.get());

    // Nothing has been added yet, so the first lookup must miss.
    const SkMipmap* mipmap = SkMipmapCache::FindAndRef(desc, cache);
    REPORTER_ASSERT(reporter, nullptr == mipmap);

    mipmap = SkMipmapCache::AddAndRef(as_IB(img.get()), cache);
    REPORTER_ASSERT(reporter, mipmap);

    {
        // A subsequent lookup must return the very same entry we just added.
        const SkMipmap* mm = SkMipmapCache::FindAndRef(desc, cache);
        REPORTER_ASSERT(reporter, mm);
        REPORTER_ASSERT(reporter, mm == mipmap);
        mm->unref();
    }

    // Two refs: ours plus the cache's; locked because we hold a found ref.
    check_data(reporter, mipmap, 2, kInCache, kLocked);

    mipmap->unref();
    // tricky, since technically after this I'm no longer an owner, but since the cache is
    // local, I know it won't get purged behind my back
    check_data(reporter, mipmap, 1, kInCache, kNotLocked);

    // find us again
    mipmap = SkMipmapCache::FindAndRef(desc, cache);
    check_data(reporter, mipmap, 2, kInCache, kLocked);

    cache->purgeAll();
    // Purging dropped the cache's ref; ours is now the only one left.
    check_data(reporter, mipmap, 1, kNotInCache, kLocked);

    mipmap->unref();
}
|
|
|
|
|
2015-02-24 21:54:23 +00:00
|
|
|
// Verifies cache invalidation notifications: destroying the SkImage wrapper
// must NOT evict its mipmap entry, but destroying the underlying
// SkBitmap/pixelref MUST evict it.
static void test_mipmap_notify(skiatest::Reporter* reporter, SkResourceCache* cache) {
    const int N = 3;

    SkBitmap src[N];
    sk_sp<SkImage> img[N];
    SkBitmapCacheDesc desc[N];
    for (int i = 0; i < N; ++i) {
        src[i].allocN32Pixels(5, 5);
        src[i].setImmutable();
        img[i] = src[i].asImage();
        // Add a mipmap entry for each image, immediately dropping our ref.
        SkMipmapCache::AddAndRef(as_IB(img[i].get()), cache)->unref();
        desc[i] = SkBitmapCacheDesc::Make(img[i].get());
    }

    for (int i = 0; i < N; ++i) {
        const SkMipmap* mipmap = SkMipmapCache::FindAndRef(desc[i], cache);
        // We're always using a local cache, so we know we won't be purged by other threads
        REPORTER_ASSERT(reporter, mipmap);
        SkSafeUnref(mipmap);

        img[i].reset(); // delete the image, which *should not* remove us from the cache
        mipmap = SkMipmapCache::FindAndRef(desc[i], cache);
        REPORTER_ASSERT(reporter, mipmap);
        SkSafeUnref(mipmap);

        src[i].reset(); // delete the underlying pixelref, which *should* remove us from the cache
        mipmap = SkMipmapCache::FindAndRef(desc[i], cache);
        REPORTER_ASSERT(reporter, !mipmap);
    }
}
|
|
|
|
|
2019-04-23 17:05:21 +00:00
|
|
|
#include "src/lazy/SkDiscardableMemoryPool.h"
|
2014-09-16 17:39:55 +00:00
|
|
|
|
2017-08-28 14:34:05 +00:00
|
|
|
// Pool backing pool_factory(); set by the test before installing the factory.
static SkDiscardableMemoryPool* gPool = nullptr;
// Counts factory invocations so the test can assert the factory was used.
static int gFactoryCalls = 0;

// SkResourceCache::DiscardableFactory that allocates from gPool and tallies
// each call in gFactoryCalls.
static SkDiscardableMemory* pool_factory(size_t bytes) {
    SkASSERT(gPool);
    gFactoryCalls++;
    return gPool->create(bytes);
}
|
2014-09-11 17:49:52 +00:00
|
|
|
|
2015-11-10 12:55:15 +00:00
|
|
|
// Drives the mipmap-cache tests against the given cache configuration.
// NOTE(review): 'factory' is currently unused in this body — kept so both
// cache configurations can share one driver signature; confirm before removing.
static void testBitmapCache_discarded_bitmap(skiatest::Reporter* reporter, SkResourceCache* cache,
                                             SkResourceCache::DiscardableFactory factory) {
    test_mipmapcache(reporter, cache);
    test_mipmap_notify(reporter, cache);
}
|
2015-09-18 15:07:31 +00:00
|
|
|
|
2015-11-10 12:55:15 +00:00
|
|
|
// Runs the bitmap/mipmap cache tests twice — once against a byte-budgeted
// cache and once against a discardable-memory-backed cache — then confirms
// the discardable factory was actually exercised.
DEF_TEST(BitmapCache_discarded_bitmap, reporter) {
    const size_t kBudget = 100 * 1024;
    {
        // Byte-limited cache: no discardable factory involved.
        SkResourceCache budgetedCache(kBudget);
        testBitmapCache_discarded_bitmap(reporter, &budgetedCache, nullptr);
    }
    {
        // Discardable-backed cache: allocations flow through pool_factory.
        sk_sp<SkDiscardableMemoryPool> pool(SkDiscardableMemoryPool::Make(kBudget));
        gPool = pool.get();
        SkResourceCache discardableCache(pool_factory);
        testBitmapCache_discarded_bitmap(reporter, &discardableCache, pool_factory);
    }
    REPORTER_ASSERT(reporter, gFactoryCalls > 0);
}
|
|
|
|
|
2015-09-18 15:07:31 +00:00
|
|
|
// Draws a freshly built image (under 'transform', with non-trivial sampling
// so scaled results get cached), destroys the image, and verifies its
// bitmap-cache entries were purged with it.
static void test_discarded_image(skiatest::Reporter* reporter, const SkMatrix& transform,
                                 sk_sp<SkImage> (*buildImage)()) {
    auto surface(SkSurface::MakeRasterN32Premul(10, 10));
    SkCanvas* canvas = surface->getCanvas();

    // SkBitmapCache is global, so other threads could be evicting our bitmaps. Loop a few times
    // to mitigate this risk.
    const unsigned kRepeatCount = 42;
    for (unsigned i = 0; i < kRepeatCount; ++i) {
        SkAutoCanvasRestore acr(canvas, true);

        sk_sp<SkImage> image(buildImage());

        // draw the image (with a transform, to tickle different code paths) to ensure
        // any associated resources get cached
        canvas->concat(transform);
        // always use high quality to ensure caching when scaled
        canvas->drawImage(image, 0, 0, SkSamplingOptions({1.0f/3, 1.0f/3}));

        const auto desc = SkBitmapCacheDesc::Make(image.get());

        // delete the image
        image.reset(nullptr);

        // all resources should have been purged
        SkBitmap result;
        REPORTER_ASSERT(reporter, !SkBitmapCache::Find(desc, &result));
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
// Verify that associated bitmap cache entries are purged on SkImage destruction.
|
|
|
|
DEF_TEST(BitmapCache_discarded_image, reporter) {
|
|
|
|
// Cache entries associated with SkImages fall into two categories:
|
|
|
|
//
|
|
|
|
// 1) generated image bitmaps (managed by the image cacherator)
|
|
|
|
// 2) scaled/resampled bitmaps (cached when HQ filters are used)
|
|
|
|
//
|
|
|
|
// To exercise the first cache type, we use generated/picture-backed SkImages.
|
|
|
|
// To exercise the latter, we draw scaled bitmap images using HQ filters.
|
|
|
|
|
|
|
|
const SkMatrix xforms[] = {
|
2020-05-21 16:11:27 +00:00
|
|
|
SkMatrix::Scale(1, 1),
|
|
|
|
SkMatrix::Scale(1.7f, 0.5f),
|
2015-09-18 15:07:31 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
for (size_t i = 0; i < SK_ARRAY_COUNT(xforms); ++i) {
|
|
|
|
test_discarded_image(reporter, xforms[i], []() {
|
2016-03-24 01:59:25 +00:00
|
|
|
auto surface(SkSurface::MakeRasterN32Premul(10, 10));
|
2015-09-18 15:07:31 +00:00
|
|
|
surface->getCanvas()->clear(SK_ColorCYAN);
|
2016-03-17 17:51:11 +00:00
|
|
|
return surface->makeImageSnapshot();
|
2015-09-18 15:07:31 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
test_discarded_image(reporter, xforms[i], []() {
|
|
|
|
SkPictureRecorder recorder;
|
|
|
|
SkCanvas* canvas = recorder.beginRecording(10, 10);
|
|
|
|
canvas->clear(SK_ColorCYAN);
|
2016-03-18 14:25:55 +00:00
|
|
|
return SkImage::MakeFromPicture(recorder.finishRecordingAsPicture(),
|
2016-12-16 16:55:18 +00:00
|
|
|
SkISize::Make(10, 10), nullptr, nullptr,
|
2017-01-09 17:38:59 +00:00
|
|
|
SkImage::BitDepth::kU8,
|
2017-02-07 18:56:11 +00:00
|
|
|
SkColorSpace::MakeSRGB());
|
2015-09-18 15:07:31 +00:00
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
2017-04-11 16:03:44 +00:00
|
|
|
|
|
|
|
///////////////////////////////////////////////////////////////////////////////////////////////////
|
|
|
|
|
|
|
|
// Unique address used as the namespace tag for TestKey (never dereferenced).
static void* gTestNamespace;
|
|
|
|
|
|
|
|
// Minimal cache key: a 4-byte payload, tagged with gTestNamespace and an
// explicit sharedID so distinct recs can share a notification group.
struct TestKey : SkResourceCache::Key {
    int32_t fData;

    TestKey(int sharedID, int32_t data) : fData(data) {
        this->init(&gTestNamespace, sharedID, sizeof(fData));
    }
};
|
|
|
|
|
|
|
|
// Minimal cache record used to observe SkResourceCache::add() decisions:
// postAddInstall() sets kDidInstall in *fFlags, and canBePurged() is
// controlled by the test via fCanBePurged.
struct TestRec : SkResourceCache::Rec {
    enum {
        kDidInstall = 1 << 0,
    };

    TestKey fKey;
    int* fFlags;        // out-param: records whether this rec got installed
    bool fCanBePurged;  // returned by canBePurged(); defaults to false

    TestRec(int sharedID, int32_t data, int* flagPtr) : fKey(sharedID, data), fFlags(flagPtr) {
        fCanBePurged = false;
    }

    const Key& getKey() const override { return fKey; }
    size_t bytesUsed() const override { return 1024; /* just need a value */ }
    bool canBePurged() override { return fCanBePurged; }
    void postAddInstall(void*) override {
        *fFlags |= kDidInstall;
    }
    const char* getCategory() const override { return "test-category"; }
};
|
|
|
|
|
|
|
|
// Adds two recs with identical keys and verifies the cache's duplicate
// policy: a purgable existing rec is replaced by the newcomer; an unpurgable
// one is kept and the newcomer is discarded without installation.
static void test_duplicate_add(SkResourceCache* cache, skiatest::Reporter* reporter,
                               bool purgable) {
    int sharedID = 1;
    int data = 0;

    int flags0 = 0, flags1 = 0;

    auto rec0 = std::make_unique<TestRec>(sharedID, data, &flags0);
    auto rec1 = std::make_unique<TestRec>(sharedID, data, &flags1);
    SkASSERT(rec0->getKey() == rec1->getKey());

    TestRec* r0 = rec0.get();   // save the bare-pointer since we will release rec0
    r0->fCanBePurged = purgable;

    // Neither rec has been installed yet.
    REPORTER_ASSERT(reporter, !(flags0 & TestRec::kDidInstall));
    REPORTER_ASSERT(reporter, !(flags1 & TestRec::kDidInstall));

    // add() takes ownership, hence release().
    cache->add(rec0.release(), nullptr);
    REPORTER_ASSERT(reporter, flags0 & TestRec::kDidInstall);
    REPORTER_ASSERT(reporter, !(flags1 & TestRec::kDidInstall));
    flags0 = 0; // reset the flag

    cache->add(rec1.release(), nullptr);
    if (purgable) {
        // we purged rec0, and did install rec1
        REPORTER_ASSERT(reporter, !(flags0 & TestRec::kDidInstall));
        REPORTER_ASSERT(reporter, flags1 & TestRec::kDidInstall);
    } else {
        // we re-used rec0 and did not install rec1
        REPORTER_ASSERT(reporter, flags0 & TestRec::kDidInstall);
        REPORTER_ASSERT(reporter, !(flags1 & TestRec::kDidInstall));
        r0->fCanBePurged = true; // so we can cleanup the cache
    }
}
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Test behavior when the same key is added more than once.
|
|
|
|
*/
|
|
|
|
DEF_TEST(ResourceCache_purge, reporter) {
|
|
|
|
for (bool purgable : { false, true }) {
|
|
|
|
{
|
|
|
|
SkResourceCache cache(1024 * 1024);
|
|
|
|
test_duplicate_add(&cache, reporter, purgable);
|
|
|
|
}
|
|
|
|
{
|
|
|
|
SkResourceCache cache(SkDiscardableMemory::Create);
|
|
|
|
test_duplicate_add(&cache, reporter, purgable);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|