/*
 * Copyright 2013 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkDiscardableMemory.h"
#include "SkScaledImageCache.h"
#include "Test.h"

static void make_bm(SkBitmap* bm, int w, int h) {
    bm->allocN32Pixels(w, h);
}

static const int COUNT = 10;
static const int DIM = 256;

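// Exercise basic add/find/lock/unlock behavior on the given cache; when
// testPurge is true, also flood the cache with extra entries to force purging.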
static void test_cache(skiatest::Reporter* reporter, SkScaledImageCache& cache,
                       bool testPurge) {
    SkScaledImageCache::ID* id;

    SkBitmap bm[COUNT];

    const SkScalar scale = 2;
    for (int i = 0; i < COUNT; ++i) {
        make_bm(&bm[i], DIM, DIM);
    }

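    // Each bitmap should miss on its first lookup, then be found once added.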
    for (int i = 0; i < COUNT; ++i) {
        SkBitmap tmp;

        SkScaledImageCache::ID* id = cache.findAndLock(bm[i], scale, scale, &tmp);
        REPORTER_ASSERT(reporter, NULL == id);

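        // Add a scaled entry, then verify that a second lookup returns the
        // same locked entry: same ID, same pixel ref, same dimensions.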
        make_bm(&tmp, DIM, DIM);
        id = cache.addAndLock(bm[i], scale, scale, tmp);
        REPORTER_ASSERT(reporter, NULL != id);

        SkBitmap tmp2;
        SkScaledImageCache::ID* id2 = cache.findAndLock(bm[i], scale, scale,
                                                        &tmp2);
        REPORTER_ASSERT(reporter, id == id2);
        REPORTER_ASSERT(reporter, tmp.pixelRef() == tmp2.pixelRef());
        REPORTER_ASSERT(reporter, tmp.width() == tmp2.width());
        REPORTER_ASSERT(reporter, tmp.height() == tmp2.height());
        cache.unlock(id2);

        cache.unlock(id);
    }

    if (testPurge) {
        // Stress test: adding many entries at ever-larger scales should
        // trigger purges of older entries.
        float incScale = 2;
        for (size_t i = 0; i < COUNT * 100; ++i) {
            incScale += 1;

            SkBitmap tmp;
            make_bm(&tmp, DIM, DIM);

            SkScaledImageCache::ID* id = cache.addAndLock(bm[0], incScale,
                                                          incScale, tmp);
            REPORTER_ASSERT(reporter, NULL != id);
            cache.unlock(id);
        }
    }

    // test the originals after all that purging
    for (int i = 0; i < COUNT; ++i) {
        SkBitmap tmp;
        id = cache.findAndLock(bm[i], scale, scale, &tmp);
        if (id) {
            cache.unlock(id);
        }
    }

    cache.setByteLimit(0);
}

#include "SkDiscardableMemoryPool.h"

static SkDiscardableMemoryPool* gPool;

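// Discardable-memory factory handed to SkScaledImageCache; it allocates from
// the shared test pool above.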
static SkDiscardableMemory* pool_factory(size_t bytes) {
    SkASSERT(gPool);
    return gPool->create(bytes);
}

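// Run test_cache() against each backing strategy: a fixed byte budget, a
// discardable-memory pool, and real discardable memory.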
DEF_TEST(ImageCache, reporter) {
    static const size_t defLimit = DIM * DIM * 4 * COUNT + 1024;   // 1K slop

    {
        SkScaledImageCache cache(defLimit);
        test_cache(reporter, cache, true);
    }
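    // The same checks, backed by a discardable-memory pool with the same byte limit.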
    {
        SkAutoTUnref<SkDiscardableMemoryPool> pool(
                SkDiscardableMemoryPool::Create(defLimit, NULL));
        gPool = pool.get();
        SkScaledImageCache cache(pool_factory);
        test_cache(reporter, cache, true);
    }
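    // Backed by real discardable memory; runs the basic checks only (no purge stress).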
    {
        SkScaledImageCache cache(SkDiscardableMemory::Create);
        test_cache(reporter, cache, false);
    }
}

DEF_TEST(ImageCache_doubleAdd, r) {
    // Adding the same key twice should be safe.
    SkScaledImageCache cache(4096);

    SkBitmap original;
    original.allocN32Pixels(40, 40);

    SkBitmap scaled1;
    scaled1.allocN32Pixels(20, 20);

    SkBitmap scaled2;
    scaled2.allocN32Pixels(20, 20);

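    // Both adds use the same original bitmap and the same 0.5x0.5 scale, so
    // they target the same cache key.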
    SkScaledImageCache::ID* id1 = cache.addAndLock(original, 0.5f, 0.5f, scaled1);
    SkScaledImageCache::ID* id2 = cache.addAndLock(original, 0.5f, 0.5f, scaled2);
    // We don't really care if id1 == id2 as long as unlocking both works.
    cache.unlock(id1);
    cache.unlock(id2);

    SkBitmap tmp;
    // Lookup should return the value that was added last.
    SkScaledImageCache::ID* id = cache.findAndLock(original, 0.5f, 0.5f, &tmp);
    REPORTER_ASSERT(r, NULL != id);
    REPORTER_ASSERT(r, tmp.getGenerationID() == scaled2.getGenerationID());
    cache.unlock(id);
}