Allow SkLazyPixelRef to use SkScaledImageCache
SkScaledImageCache:
- Add new FindAndLock/AddAndLock variants that work well with
  SkLazyPixelRefs (they take width, height, and generation_id).
- Add static versions of these new variants.

SkLazyPixelRef:
- If NULL is passed in as the SkImageCache* in the constructor, it will
  now default to using the static SkScaledImageCache methods to cache
  decoded images.
- If (fImageCache==NULL), the default allocator can be changed with the
  setAllocator method.  If (fImageCache!=NULL), the SkImageCache handles
  allocation.

Added CachedDecodingPixelRefTest to test the new functionality.

BUG=
R=scroggo@google.com, mtklein@google.com, reed@google.com

Author: halcanary@google.com

Review URL: https://codereview.chromium.org/37343002

git-svn-id: http://skia.googlecode.com/svn/trunk@12006 2bbb7eff-a529-9590-31e7-b0007b416f81
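For orientation, here is a minimal sketch (not part of the commit) of the caller pattern the new static API enables; it mirrors the lockScaledImageCachePixels() code added to SkLazyPixelRef.cpp in the diff below. The helper name lock_via_global_cache, its signature, and the elided decode step are illustrative assumptions; only the SkScaledImageCache calls come from the change itself.

    // Sketch only: look up decoded pixels in the process-global cache by
    // (generation ID, width, height); on a miss, decode and publish them.
    // The returned ID keeps the cached bitmap alive until Unlock() is called.
    // Pixel-locking details are omitted; see SkLazyPixelRef.cpp in the diff.
    static void* lock_via_global_cache(uint32_t genID, const SkImage::Info& info,
                                       SkBitmap* bitmap,
                                       SkScaledImageCache::ID** outId) {
        *outId = SkScaledImageCache::FindAndLock(genID, info.fWidth,
                                                 info.fHeight, bitmap);
        if (NULL != *outId) {
            return bitmap->getPixels();   // cache hit: pixels stay valid while locked
        }
        // Cache miss: decode into *bitmap here (decode step elided), then
        // share the decoded pixels through the global cache.
        *outId = SkScaledImageCache::AddAndLock(genID, info.fWidth,
                                                info.fHeight, *bitmap);
        return bitmap->getPixels();
    }
    // When the caller is done with the pixels:
    //     SkScaledImageCache::Unlock(*outId);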
gyp/tests.gyp:

@@ -10,6 +10,7 @@
     'include_dirs' : [
       '../src/core',
       '../src/effects',
+      '../src/image',
       '../src/lazy',
       '../src/pathops',
       '../src/pdf',
@@ -34,6 +35,7 @@
     '../tests/BitSetTest.cpp',
     '../tests/BlitRowTest.cpp',
     '../tests/BlurTest.cpp',
+    '../tests/CachedDecodingPixelRefTest.cpp',
     '../tests/CanvasTest.cpp',
     '../tests/CanvasStateTest.cpp',
     '../tests/ChecksumTest.cpp',
src/core/SkScaledImageCache.cpp:

@@ -7,6 +7,7 @@
 
 #include "SkScaledImageCache.h"
 #include "SkMipMap.h"
+#include "SkOnce.h"
 #include "SkPixelRef.h"
 #include "SkRect.h"
 
@@ -14,6 +15,13 @@
 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
 #endif
 
+static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
+    return reinterpret_cast<SkScaledImageCache::ID*>(rec);
+}
+
+static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
+    return reinterpret_cast<SkScaledImageCache::Rec*>(id);
+}
+
 // Implemented from en.wikipedia.org/wiki/MurmurHash.
 static uint32_t compute_hash(const uint32_t data[], int count) {
@@ -42,23 +50,15 @@ static uint32_t compute_hash(const uint32_t data[], int count) {
 }
 
 struct Key {
-    bool init(const SkBitmap& bm, SkScalar scaleX, SkScalar scaleY) {
-        SkPixelRef* pr = bm.pixelRef();
-        if (!pr) {
-            return false;
-        }
-
-        size_t x, y;
-        SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x);
-        x >>= 2;
-
-        fGenID = pr->getGenerationID();
-        fBounds.set(x, y, x + bm.width(), y + bm.height());
-        fScaleX = scaleX;
-        fScaleY = scaleY;
-
+    Key(uint32_t genID,
+        SkScalar scaleX,
+        SkScalar scaleY,
+        SkIRect bounds)
+        : fGenID(genID)
+        , fScaleX(scaleX)
+        , fScaleY(scaleY)
+        , fBounds(bounds) {
         fHash = compute_hash(&fGenID, 7);
-        return true;
     }
 
     bool operator<(const Key& other) const {
@@ -151,6 +151,17 @@ class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec,
 // experimental hash to speed things up
 #define USE_HASH
 
+#if !defined(USE_HASH)
+static inline SkScaledImageCache::Rec* find_rec_in_list(
+        SkScaledImageCache::Rec* head, const Key & key) {
+    SkScaledImageCache::Rec* rec = head;
+    while ((rec != NULL) && (rec->fKey != key)) {
+        rec = rec->fNext;
+    }
+    return rec;
+}
+#endif
+
 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
     fHead = NULL;
     fTail = NULL;
@@ -174,26 +185,24 @@ SkScaledImageCache::~SkScaledImageCache() {
     delete fHash;
 }
 
-SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig,
+////////////////////////////////////////////////////////////////////////////////
+
+/**
+   This private method is the fully general record finder. All other
+   record finders should call this funtion. */
+SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
                                                          SkScalar scaleX,
-                                                         SkScalar scaleY) {
-    Key key;
-    if (!key.init(orig, scaleX, scaleY)) {
+                                                         SkScalar scaleY,
+                                                         const SkIRect& bounds) {
+    if (bounds.isEmpty()) {
         return NULL;
     }
-
+    Key key(genID, scaleX, scaleY, bounds);
 #ifdef USE_HASH
     Rec* rec = fHash->find(key);
 #else
-    Rec* rec = fHead;
-    while (rec != NULL) {
-        if (rec->fKey == key) {
-            break;
-        }
-        rec = rec->fNext;
-    }
+    Rec* rec = find_rec_in_list(fHead, key);
 #endif
 
     if (rec) {
         this->moveToHead(rec);  // for our LRU
         rec->fLockCount += 1;
@@ -201,6 +210,36 @@ SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig,
     return rec;
 }
 
+/**
+   This function finds the bounds of the bitmap *within its pixelRef*.
+   If the bitmap lacks a pixelRef, it will return an empty rect, since
+   that doesn't make sense.  This may be a useful enough function that
+   it should be somewhere else (in SkBitmap?). */
+static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
+    if (!(bm.pixelRef())) {
+        return SkIRect::MakeEmpty();
+    }
+    size_t x, y;
+    SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x);
+    x >>= bm.shiftPerPixel();
+    return SkIRect::MakeXYWH(x, y, bm.width(), bm.height());
+}
+
+
+SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
+                                                        int32_t width,
+                                                        int32_t height,
+                                                        SkBitmap* bitmap) {
+    Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
+                                 SkIRect::MakeWH(width, height));
+    if (rec) {
+        SkASSERT(NULL == rec->fMip);
+        SkASSERT(rec->fBitmap.pixelRef());
+        *bitmap = rec->fBitmap;
+    }
+    return rec_to_id(rec);
+}
+
 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                         SkScalar scaleX,
                                                         SkScalar scaleY,
@@ -209,25 +248,53 @@ SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
         // degenerate, and the key we use for mipmaps
         return NULL;
     }
-
-    Rec* rec = this->findAndLock(orig, scaleX, scaleY);
+    Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
+                                 scaleY, get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *scaled = rec->fBitmap;
    }
-    return (ID*)rec;
+    return rec_to_id(rec);
 }
 
 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
                                                            SkMipMap const ** mip) {
-    Rec* rec = this->findAndLock(orig, 0, 0);
+    Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
+                                 get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(rec->fMip);
        SkASSERT(NULL == rec->fBitmap.pixelRef());
        *mip = rec->fMip;
    }
-    return (ID*)rec;
+    return rec_to_id(rec);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+/**
+   This private method is the fully general record adder. All other
+   record adders should call this funtion. */
+void SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
+    SkASSERT(rec);
+    this->addToHead(rec);
+    SkASSERT(1 == rec->fLockCount);
+#ifdef USE_HASH
+    SkASSERT(fHash);
+    fHash->add(rec);
+#endif
+    // We may (now) be overbudget, so see if we need to purge something.
+    this->purgeAsNeeded();
+}
+
+SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
+                                                       int32_t width,
+                                                       int32_t height,
+                                                       const SkBitmap& bitmap) {
+    Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
+    Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
+    this->addAndLock(rec);
+    return rec_to_id(rec);
 }
 
 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
@@ -238,43 +305,26 @@ SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
         // degenerate, and the key we use for mipmaps
         return NULL;
     }
-
-    Key key;
-    if (!key.init(orig, scaleX, scaleY)) {
+    SkIRect bounds = get_bounds_from_bitmap(orig);
+    if (bounds.isEmpty()) {
         return NULL;
     }
-
+    Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
     Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
-    this->addToHead(rec);
-    SkASSERT(1 == rec->fLockCount);
-
-#ifdef USE_HASH
-    fHash->add(rec);
-#endif
-
-    // We may (now) be overbudget, so see if we need to purge something.
-    this->purgeAsNeeded();
-    return (ID*)rec;
+    this->addAndLock(rec);
+    return rec_to_id(rec);
 }
 
 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
                                                           const SkMipMap* mip) {
-    Key key;
-    if (!key.init(orig, 0, 0)) {
+    SkIRect bounds = get_bounds_from_bitmap(orig);
+    if (bounds.isEmpty()) {
         return NULL;
     }
-
+    Key key(orig.getGenerationID(), 0, 0, bounds);
     Rec* rec = SkNEW_ARGS(Rec, (key, mip));
-    this->addToHead(rec);
-    SkASSERT(1 == rec->fLockCount);
-
-#ifdef USE_HASH
-    fHash->add(rec);
-#endif
-
-    // We may (now) be overbudget, so see if we need to purge something.
-    this->purgeAsNeeded();
-    return (ID*)rec;
+    this->addAndLock(rec);
+    return rec_to_id(rec);
 }
 
 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
@@ -285,7 +335,7 @@ void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
         bool found = false;
         Rec* rec = fHead;
         while (rec != NULL) {
-            if ((ID*)rec == id) {
+            if (rec == id_to_rec(id)) {
                 found = true;
                 break;
             }
@@ -294,7 +344,7 @@ void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
         SkASSERT(found);
     }
 #endif
-    Rec* rec = (Rec*)id;
+    Rec* rec = id_to_rec(id);
     SkASSERT(rec->fLockCount > 0);
     rec->fLockCount -= 1;
 
@@ -451,14 +501,38 @@ void SkScaledImageCache::validate() const {
 
 SK_DECLARE_STATIC_MUTEX(gMutex);
 
+static void create_cache(SkScaledImageCache** cache) {
+    *cache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
+}
+
 static SkScaledImageCache* get_cache() {
-    static SkScaledImageCache* gCache;
-    if (!gCache) {
-        gCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
-    }
+    static SkScaledImageCache* gCache(NULL);
+    SK_DECLARE_STATIC_ONCE(create_cache_once);
+    SkOnce<SkScaledImageCache**>(&create_cache_once, create_cache, &gCache);
+    SkASSERT(NULL != gCache);
     return gCache;
 }
 
+SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
+                                uint32_t pixelGenerationID,
+                                int32_t width,
+                                int32_t height,
+                                SkBitmap* scaled) {
+    SkAutoMutexAcquire am(gMutex);
+    return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
+}
+
+SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
+                                uint32_t pixelGenerationID,
+                                int32_t width,
+                                int32_t height,
+                                const SkBitmap& scaled) {
+    SkAutoMutexAcquire am(gMutex);
+    return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
+}
+
 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
                                                         SkScalar scaleX,
                                                         SkScalar scaleY,
src/core/SkScaledImageCache.h:

@@ -31,13 +31,25 @@ public:
      *  instance of this cache.
      */
+
+    static ID* FindAndLock(uint32_t pixelGenerationID,
+                           int32_t width,
+                           int32_t height,
+                           SkBitmap* returnedBitmap);
+
     static ID* FindAndLock(const SkBitmap& original, SkScalar scaleX,
-                           SkScalar scaleY, SkBitmap* scaled);
-    static ID* FindAndLockMip(const SkBitmap& original, SkMipMap const**);
+                           SkScalar scaleY, SkBitmap* returnedBitmap);
+    static ID* FindAndLockMip(const SkBitmap& original,
+                              SkMipMap const** returnedMipMap);
+
+    static ID* AddAndLock(uint32_t pixelGenerationID,
+                          int32_t width,
+                          int32_t height,
+                          const SkBitmap& bitmap);
+
     static ID* AddAndLock(const SkBitmap& original, SkScalar scaleX,
-                          SkScalar scaleY, const SkBitmap& scaled);
-    static ID* AddAndLockMip(const SkBitmap& original, const SkMipMap*);
+                          SkScalar scaleY, const SkBitmap& bitmap);
+    static ID* AddAndLockMip(const SkBitmap& original, const SkMipMap* mipMap);
 
     static void Unlock(ID*);
 
@@ -51,24 +63,48 @@ public:
     ~SkScaledImageCache();
 
     /**
-     *  Search the cache for a scaled version of original. If found, return it
-     *  in scaled, and return its ID pointer. Use the returned ptr to unlock
-     *  the cache when you are done using scaled.
+     *  Search the cache for a matching bitmap (using generationID,
+     *  width, and height as a search key). If found, return it in
+     *  returnedBitmap, and return its ID pointer. Use the returned
+     *  ptr to unlock the cache when you are done using
+     *  returnedBitmap.
      *
-     *  If a match is not found, scaled will be unmodifed, and NULL will be
-     *  returned.
+     *  If a match is not found, returnedBitmap will be unmodifed, and
+     *  NULL will be returned.
+     *
+     *  This is used if there is no scaling or subsetting, for example
+     *  by SkLazyPixelRef.
      */
-    ID* findAndLock(const SkBitmap& original, SkScalar scaleX,
-                    SkScalar scaleY, SkBitmap* scaled);
-    ID* findAndLockMip(const SkBitmap& original, SkMipMap const**);
+    ID* findAndLock(uint32_t pixelGenerationID, int32_t width, int32_t height,
+                    SkBitmap* returnedBitmap);
 
     /**
-     *  To add a new (scaled) bitmap to the cache, call AddAndLock. Use the
-     *  returned ptr to unlock the cache when you are done using scaled.
+     *  Search the cache for a scaled version of original. If found,
+     *  return it in returnedBitmap, and return its ID pointer. Use
+     *  the returned ptr to unlock the cache when you are done using
+     *  returnedBitmap.
+     *
+     *  If a match is not found, returnedBitmap will be unmodifed, and
+     *  NULL will be returned.
     */
+    ID* findAndLock(const SkBitmap& original, SkScalar scaleX,
+                    SkScalar scaleY, SkBitmap* returnedBitmap);
+    ID* findAndLockMip(const SkBitmap& original,
+                       SkMipMap const** returnedMipMap);
+
+    /**
+     *  To add a new bitmap (or mipMap) to the cache, call
+     *  AddAndLock. Use the returned ptr to unlock the cache when you
+     *  are done using scaled.
+     *
+     *  Use (generationID, width, and height) or (original, scaleX,
+     *  scaleY) or (original) as a search key
+     */
+    ID* addAndLock(uint32_t pixelGenerationID, int32_t width, int32_t height,
+                   const SkBitmap& bitmap);
     ID* addAndLock(const SkBitmap& original, SkScalar scaleX,
-                   SkScalar scaleY, const SkBitmap& scaled);
-    ID* addAndLockMip(const SkBitmap& original, const SkMipMap*);
+                   SkScalar scaleY, const SkBitmap& bitmap);
+    ID* addAndLockMip(const SkBitmap& original, const SkMipMap* mipMap);
 
     /**
      *  Given a non-null ID ptr returned by either findAndLock or addAndLock,
@@ -101,7 +137,9 @@ private:
     size_t  fByteLimit;
     int     fCount;
 
-    Rec* findAndLock(const SkBitmap& original, SkScalar sx, SkScalar sy);
+    Rec* findAndLock(uint32_t generationID, SkScalar sx, SkScalar sy,
+                     const SkIRect& bounds);
+    void addAndLock(Rec* rec);
 
     void purgeAsNeeded();
 
@@ -115,5 +153,4 @@ private:
     void validate() const {}
 #endif
 };
-
 #endif
src/lazy/SkLazyPixelRef.cpp:

@@ -11,6 +11,7 @@
 #include "SkData.h"
 #include "SkImageCache.h"
 #include "SkImagePriv.h"
+#include "SkScaledImageCache.h"
 
 #if LAZY_CACHE_STATS
 #include "SkThread.h"
@@ -22,9 +23,9 @@ int32_t SkLazyPixelRef::gCacheMisses;
 SkLazyPixelRef::SkLazyPixelRef(SkData* data, SkBitmapFactory::DecodeProc proc, SkImageCache* cache)
     // Pass NULL for the Mutex so that the default (ring buffer) will be used.
     : INHERITED(NULL)
+    , fErrorInDecoding(false)
     , fDecodeProc(proc)
     , fImageCache(cache)
-    , fCacheId(SkImageCache::UNINITIALIZED_ID)
     , fRowBytes(0) {
     SkASSERT(fDecodeProc != NULL);
     if (NULL == data) {
@@ -35,8 +36,12 @@ SkLazyPixelRef::SkLazyPixelRef(SkData* data, SkBitmapFactory::DecodeProc proc, S
         fData->ref();
         fErrorInDecoding = data->size() == 0;
     }
-    SkASSERT(cache != NULL);
-    cache->ref();
+    if (fImageCache != NULL) {
+        fImageCache->ref();
+        fCacheId = SkImageCache::UNINITIALIZED_ID;
+    } else {
+        fScaledCacheId = NULL;
+    }
 
     // mark as uninitialized -- all fields are -1
     memset(&fLazilyCachedInfo, 0xFF, sizeof(fLazilyCachedInfo));
@@ -48,6 +53,14 @@ SkLazyPixelRef::SkLazyPixelRef(SkData* data, SkBitmapFactory::DecodeProc proc, S
 SkLazyPixelRef::~SkLazyPixelRef() {
     SkASSERT(fData != NULL);
     fData->unref();
+    if (NULL == fImageCache) {
+        if (fScaledCacheId != NULL) {
+            SkScaledImageCache::Unlock(fScaledCacheId);
+            // TODO(halcanary): SkScaledImageCache needs a
+            // throwAwayCache(id) method.
+        }
+        return;
+    }
     SkASSERT(fImageCache);
     if (fCacheId != SkImageCache::UNINITIALIZED_ID) {
         fImageCache->throwAwayCache(fCacheId);
@@ -79,10 +92,91 @@ const SkImage::Info* SkLazyPixelRef::getCachedInfo() {
     return &fLazilyCachedInfo;
 }
 
+/**
+   Returns bitmap->getPixels() on success; NULL on failure */
+static void* decode_into_bitmap(SkImage::Info* info,
+                                SkBitmapFactory::DecodeProc decodeProc,
+                                size_t* rowBytes,
+                                SkData* data,
+                                SkBitmap* bm) {
+    SkASSERT(info && decodeProc && rowBytes && data && bm);
+    if (!(bm->setConfig(SkImageInfoToBitmapConfig(*info), info->fWidth,
+                        info->fHeight, *rowBytes, info->fAlphaType)
+          && bm->allocPixels(NULL, NULL))) {
+        // Use the default allocator.  It may be necessary for the
+        // SkLazyPixelRef to have a allocator field which is passed
+        // into allocPixels().
+        return NULL;
+    }
+    SkBitmapFactory::Target target;
+    target.fAddr = bm->getPixels();
+    target.fRowBytes = bm->rowBytes();
+    *rowBytes = target.fRowBytes;
+    if (!decodeProc(data->data(), data->size(), info, &target)) {
+        return NULL;
+    }
+    return target.fAddr;
+}
+
+void* SkLazyPixelRef::lockScaledImageCachePixels() {
+    SkASSERT(!fErrorInDecoding);
+    SkASSERT(NULL == fImageCache);
+    SkBitmap bitmap;
+    const SkImage::Info* info = this->getCachedInfo();
+    if (info == NULL) {
+        return NULL;
+    }
+    // If this is the first time though, this is guaranteed to fail.
+    // Maybe we should have a flag that says "don't even bother looking"
+    fScaledCacheId = SkScaledImageCache::FindAndLock(this->getGenerationID(),
+                                                     info->fWidth,
+                                                     info->fHeight,
+                                                     &bitmap);
+    if (fScaledCacheId != NULL) {
+        SkAutoLockPixels autoLockPixels(bitmap);
+        void* pixels = bitmap.getPixels();
+        SkASSERT(NULL != pixels);
+        // At this point, the autoLockPixels will unlockPixels()
+        // to remove bitmap's lock on the pixels.  We will then
+        // destroy bitmap.  The *only* guarantee that this pointer
+        // remains valid is the guarantee made by
+        // SkScaledImageCache that it will not destroy the *other*
+        // bitmap (SkScaledImageCache::Rec.fBitmap) that holds a
+        // reference to the concrete PixelRef while this record is
+        // locked.
+        return pixels;
+    } else {
+        // Cache has been purged, must re-decode.
+        void* pixels = decode_into_bitmap(const_cast<SkImage::Info*>(info),
+                                          fDecodeProc, &fRowBytes, fData,
+                                          &bitmap);
+        if (NULL == pixels) {
+            fErrorInDecoding = true;
+            return NULL;
+        }
+        fScaledCacheId = SkScaledImageCache::AddAndLock(this->getGenerationID(),
+                                                        info->fWidth,
+                                                        info->fHeight,
+                                                        bitmap);
+        SkASSERT(fScaledCacheId != NULL);
+        return pixels;
+    }
+}
+
 void* SkLazyPixelRef::onLockPixels(SkColorTable**) {
     if (fErrorInDecoding) {
         return NULL;
     }
+    if (NULL == fImageCache) {
+        return this->lockScaledImageCachePixels();
+    } else {
+        return this->lockImageCachePixels();
+    }
+}
+
+void* SkLazyPixelRef::lockImageCachePixels() {
+    SkASSERT(fImageCache != NULL);
+    SkASSERT(!fErrorInDecoding);
     SkBitmapFactory::Target target;
     // Check to see if the pixels still exist in the cache.
     if (SkImageCache::UNINITIALIZED_ID == fCacheId) {
@@ -147,8 +241,19 @@ void SkLazyPixelRef::onUnlockPixels() {
     if (fErrorInDecoding) {
         return;
     }
-    if (fCacheId != SkImageCache::UNINITIALIZED_ID) {
-        fImageCache->releaseCache(fCacheId);
+    if (NULL == fImageCache) {
+        // onUnlockPixels() should never be called a second time from
+        // PixelRef::Unlock() without calling onLockPixels() first.
+        SkASSERT(NULL != fScaledCacheId);
+        if (NULL != fScaledCacheId) {
+            SkScaledImageCache::Unlock(fScaledCacheId);
+            fScaledCacheId = NULL;
+        }
+    } else {  // use fImageCache
+        SkASSERT(SkImageCache::UNINITIALIZED_ID != fCacheId);
+        if (SkImageCache::UNINITIALIZED_ID != fCacheId) {
+            fImageCache->releaseCache(fCacheId);
+        }
     }
 }
 
@@ -157,8 +262,6 @@ SkData* SkLazyPixelRef::onRefEncodedData() {
     return fData;
 }
 
-#include "SkImagePriv.h"
-
 static bool init_from_info(SkBitmap* bm, const SkImage::Info& info,
                            size_t rowBytes) {
     SkBitmap::Config config = SkImageInfoToBitmapConfig(info);
@@ -206,3 +309,4 @@ bool SkLazyPixelRef::onDecodeInto(int pow2, SkBitmap* bitmap) {
     *bitmap = tmp;
     return true;
 }
+
src/lazy/SkLazyPixelRef.h:

@@ -10,8 +10,10 @@
 
 #include "SkBitmapFactory.h"
 #include "SkImage.h"
+#include "SkImageCache.h"
 #include "SkPixelRef.h"
 #include "SkFlattenable.h"
+#include "SkScaledImageCache.h"
 
 class SkColorTable;
 class SkData;
@@ -33,8 +35,9 @@ public:
      *  Create a new SkLazyPixelRef.
      *  @param SkData Encoded data representing the pixels.
      *  @param DecodeProc Called to decode the pixels when needed. Must be non-NULL.
-     *  @param SkImageCache Object that handles allocating and freeing the pixel memory, as needed.
-     *         Must not be NULL.
+     *  @param SkImageCache Object that handles allocating and freeing
+     *         the pixel memory, as needed. If NULL, use the global
+     *         SkScaledImageCache.
      */
     SkLazyPixelRef(SkData*, SkBitmapFactory::DecodeProc, SkImageCache*);
 
@@ -69,7 +72,10 @@ private:
     SkData*                     fData;
     SkBitmapFactory::DecodeProc fDecodeProc;
    SkImageCache*               fImageCache;
-    intptr_t                    fCacheId;
+    union {
+        SkImageCache::ID        fCacheId;
+        SkScaledImageCache::ID* fScaledCacheId;
+    };
     size_t                      fRowBytes;
     SkImage::Info               fLazilyCachedInfo;
 
@@ -80,6 +86,9 @@ private:
 
     // lazily initialized our cached info. Returns NULL on failure.
     const SkImage::Info* getCachedInfo();
+    void* lockScaledImageCachePixels();
+    void* lockImageCachePixels();
+
 
     typedef SkPixelRef INHERITED;
 };
tests/CachedDecodingPixelRefTest.cpp (new file, 204 lines):

/*
 * Copyright 2013 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkBitmap.h"
#include "SkCanvas.h"
#include "SkData.h"
#include "SkForceLinking.h"
#include "SkImageDecoder.h"
#include "SkImagePriv.h"
#include "SkLazyPixelRef.h"
#include "SkScaledImageCache.h"
#include "SkStream.h"
#include "Test.h"

__SK_FORCE_IMAGE_DECODER_LINKING;

/**
 * Fill this bitmap with some color.
 */
static void make_test_image(SkBitmap* bm) {
    static const int W = 50, H = 50;
    static const SkBitmap::Config config = SkBitmap::kARGB_8888_Config;
    bm->setConfig(config, W, H);
    bm->allocPixels();
    bm->eraseColor(SK_ColorBLACK);
    SkCanvas canvas(*bm);
    SkPaint paint;
    paint.setColor(SK_ColorBLUE);
    canvas.drawRectCoords(0, 0, SkIntToScalar(W/2),
                          SkIntToScalar(H/2), paint);
    paint.setColor(SK_ColorWHITE);
    canvas.drawRectCoords(SkIntToScalar(W/2), SkIntToScalar(H/2),
                          SkIntToScalar(W), SkIntToScalar(H), paint);
}

/**
 * encode this bitmap into some data via SkImageEncoder
 */
static SkData* create_data_from_bitmap(const SkBitmap& bm,
                                       SkImageEncoder::Type type) {
    SkDynamicMemoryWStream stream;
    if (SkImageEncoder::EncodeStream(&stream, bm, type, 100)) {
        return stream.copyToData();
    }
    return NULL;
}

/**
 * A simplified version of SkBitmapFactory
 */
static bool simple_bitmap_factory(SkBitmapFactory::DecodeProc proc,
                                  SkData* data,
                                  SkBitmap* dst) {
    SkImage::Info info;
    if (!proc(data->data(), data->size(), &info, NULL)) {
        return false;
    }
    dst->setConfig(SkImageInfoToBitmapConfig(info), info.fWidth,
                   info.fHeight, 0, info.fAlphaType);
    SkAutoTUnref<SkLazyPixelRef> ref(SkNEW_ARGS(SkLazyPixelRef,
                                                (data, proc, NULL)));
    dst->setPixelRef(ref);
    return true;
}

static void compare_bitmaps(skiatest::Reporter* reporter,
                            const SkBitmap& b1, const SkBitmap& b2,
                            bool pixelPerfect = true) {
    REPORTER_ASSERT(reporter, b1.empty() == b2.empty());
    REPORTER_ASSERT(reporter, b1.width() == b2.width());
    REPORTER_ASSERT(reporter, b1.height() == b2.height());
    REPORTER_ASSERT(reporter, b1.isNull() == b2.isNull());
    SkAutoLockPixels autoLockPixels1(b1);
    SkAutoLockPixels autoLockPixels2(b2);
    REPORTER_ASSERT(reporter, b1.isNull() == b2.isNull());
    if (b1.isNull() || b1.empty()) {
        return;
    }
    REPORTER_ASSERT(reporter, NULL != b1.getPixels());
    REPORTER_ASSERT(reporter, NULL != b2.getPixels());
    if ((!(b1.getPixels())) || (!(b2.getPixels()))) {
        return;
    }
    if ((b1.width() != b2.width()) ||
        (b1.height() != b2.height())) {
        return;
    }
    if (!pixelPerfect) {
        return;
    }
    int pixelErrors = 0;
    for (int y = 0; y < b2.height(); ++y) {
        for (int x = 0; x < b2.width(); ++x) {
            if (b1.getColor(x, y) != b2.getColor(x, y)) {
                ++pixelErrors;
            }
        }
    }
    REPORTER_ASSERT(reporter, 0 == pixelErrors);
}

/**
 *  This checks to see that a SkLazyPixelRef works as advertized.
 */
#include "TestClassDef.h"
DEF_TEST(CachedDecodingPixelRefTest, reporter) {
    SkBitmap original;
    make_test_image(&original);
    const size_t bitmapSize = original.getSize();
    const size_t oldByteLimit = SkScaledImageCache::GetByteLimit();
    REPORTER_ASSERT(reporter, (!(original.empty())) && (!(original.isNull())));

    static const SkImageEncoder::Type types[] = {
        SkImageEncoder::kPNG_Type,
        SkImageEncoder::kJPEG_Type,
        SkImageEncoder::kWEBP_Type
    };

    for (size_t i = 0; i < SK_ARRAY_COUNT(types); i++) {
        SkImageEncoder::Type type = types[i];
        SkAutoDataUnref encoded(create_data_from_bitmap(original, type));
        REPORTER_ASSERT(reporter, encoded.get() != NULL);
        if (NULL == encoded.get()) {
            continue;
        }
        SkBitmap lazy;
        static const SkBitmapFactory::DecodeProc decoder =
            &(SkImageDecoder::DecodeMemoryToTarget);
        bool success = simple_bitmap_factory(decoder, encoded.get(), &lazy);

        REPORTER_ASSERT(reporter, success);

        size_t bytesUsed = SkScaledImageCache::GetBytesUsed();

        if (oldByteLimit < bitmapSize) {
            SkScaledImageCache::SetByteLimit(bitmapSize + oldByteLimit);
        }
        void* lazyPixels = NULL;

        // Since this is lazy, it shouldn't have fPixels yet!
        REPORTER_ASSERT(reporter, NULL == lazy.getPixels());
        {
            SkAutoLockPixels autoLockPixels(lazy);  // now pixels are good.
            lazyPixels = lazy.getPixels();
            REPORTER_ASSERT(reporter, NULL != lazy.getPixels());
            // first time we lock pixels, we should get bump in the size
            // of the cache by exactly bitmapSize.
            REPORTER_ASSERT(reporter, bytesUsed + bitmapSize
                            == SkScaledImageCache::GetBytesUsed());
            bytesUsed = SkScaledImageCache::GetBytesUsed();
        }
        // pixels should be gone!
        REPORTER_ASSERT(reporter, NULL == lazy.getPixels());
        {
            SkAutoLockPixels autoLockPixels(lazy);  // now pixels are good.
            REPORTER_ASSERT(reporter, NULL != lazy.getPixels());

            // verify that the same pixels are used this time.
            REPORTER_ASSERT(reporter, lazy.getPixels() == lazyPixels);
        }

        bool comparePixels = (SkImageEncoder::kPNG_Type == type);
        // Only PNG is pixel-perfect.
        compare_bitmaps(reporter, original, lazy, comparePixels);

        // force the cache to clear by making it too small.
        SkScaledImageCache::SetByteLimit(bitmapSize / 2);
        compare_bitmaps(reporter, original, lazy, comparePixels);

        // I'm pretty sure that the logic of the cache should mean
        // that it will clear to zero, regardless of where it started.
        REPORTER_ASSERT(reporter, SkScaledImageCache::GetBytesUsed() == 0);
        // TODO(someone) - write a custom allocator that can verify
        // that the memory where those pixels were cached really did
        // get freed.

        ////////////////////////////////////////////////////////////////////////
        // The following commented-out code happens to work on my
        // machine, and indicates to me that the SkLazyPixelRef is
        // behaving as designed.  But I don't know an easy way to
        // guarantee that a second allocation of the same size will
        // give a different address.
        ////////////////////////////////////////////////////////////////////////
        // {
        //     // confuse the heap allocation system
        //     SkAutoMalloc autoMalloc(bitmapSize);
        //     REPORTER_ASSERT(reporter, autoMalloc.get() == lazyPixels);
        //     {
        //         SkAutoLockPixels autoLockPixels(lazy);
        //         // verify that *different* pixels are used this time.
        //         REPORTER_ASSERT(reporter, lazy.getPixels() != lazyPixels);
        //         compare_bitmaps(reporter, original, lazy, comparePixels);
        //     }
        // }

        // restore cache size
        SkScaledImageCache::SetByteLimit(oldByteLimit);
    }
}