Change bitmapcache to not rely on lockpixels.
The Rec in the cache is the owner of the pixel memory (either discardable or malloc'd). Each external client gets a pixelref that just points to those pixels and whose destructor notifies the Rec. This eliminates the dependency on lockPixels in pixelref, freeing us to remove lockPixels from pixelref entirely.

Bug: skia:
Change-Id: If45ed0ae202a1211336626364235215253e8aa7c
Reviewed-on: https://skia-review.googlesource.com/10300
Commit-Queue: Mike Reed <reed@google.com>
Reviewed-by: Brian Osman <brianosman@google.com>
Reviewed-by: Matt Sarett <msarett@google.com>
Reviewed-by: Mike Klein <mtklein@google.com>
parent 77443974ea
commit 7a542c559a
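The ownership model described in the commit message is easy to see in miniature. The sketch below is not code from this change; it is a simplified, hypothetical illustration (plain malloc only, invented names CacheRec and PixelHandle) of a cache record that owns the pixel storage, hands out non-owning handles, and reports itself unpurgeable while any handle is still alive.

    // Hypothetical sketch of the Rec-owns-pixels pattern; not Skia code.
    #include <cassert>
    #include <cstdlib>
    #include <mutex>

    class CacheRec {
    public:
        explicit CacheRec(std::size_t bytes) : fStorage(std::malloc(bytes)) {}
        ~CacheRec() { std::free(fStorage); }

        // Hand out the pixel address and remember that a client now points at it.
        void* acquire() {
            std::lock_guard<std::mutex> lock(fMutex);
            ++fExternalCount;
            return fStorage;
        }

        // Called from a client handle's destructor.
        void release() {
            std::lock_guard<std::mutex> lock(fMutex);
            assert(fExternalCount > 0);
            --fExternalCount;
        }

        // The cache asks this before deleting the rec (compare canBePurged()).
        bool canBePurged() {
            std::lock_guard<std::mutex> lock(fMutex);
            return fExternalCount == 0;
        }

    private:
        std::mutex fMutex;
        void*      fStorage;           // owned by the rec (malloc'd here; discardable in Skia's case)
        int        fExternalCount = 0; // number of live external handles
    };

    // What an external "pixelref" amounts to in this model: a non-owning pointer
    // whose destructor notifies the owning rec.
    class PixelHandle {
    public:
        explicit PixelHandle(CacheRec* rec) : fRec(rec), fPixels(rec->acquire()) {}
        ~PixelHandle() { fRec->release(); }
        void* pixels() const { return fPixels; }
    private:
        CacheRec* fRec;    // not owned
        void*     fPixels; // points into the rec's storage
    };

In the actual change, SkBitmapCache::Rec plays the CacheRec role (backed by either SkDiscardableMemory or sk_malloc), and the pixelref created by install() via SkBitmap::installPixels(..., ReleaseProc, this) is the external handle: ReleaseProc decrements fExternalCounter, and canBePurged() keeps the rec in the cache while that counter is nonzero.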
@@ -316,6 +316,7 @@ private:
friend class SkImage_Gpu;
friend class SkImageCacherator;
friend class SkSpecialImage_Gpu;
friend void SkBitmapCache_setImmutableWithID(SkPixelRef*, uint32_t);

typedef SkRefCnt INHERITED;
};

@@ -26,10 +26,6 @@ void SkNotifyBitmapGenIDIsStale(uint32_t bitmapGenID) {

///////////////////////////////////////////////////////////////////////////////////////////////////

SkBitmap::Allocator* SkBitmapCache::GetAllocator() {
return SkResourceCache::GetAllocator();
}

/**
This function finds the bounds of the bitmap *within its pixelRef*.
If the bitmap lacks a pixelRef, it will return an empty rect, since

@@ -109,61 +105,227 @@ public:

const SkBitmapCacheDesc fDesc;
};
}

struct BitmapRec : public SkResourceCache::Rec {
BitmapRec(const SkBitmapCacheDesc& desc, const SkBitmap& result)
//////////////////////
#include "SkDiscardableMemory.h"
#include "SkNextID.h"

void SkBitmapCache_setImmutableWithID(SkPixelRef* pr, uint32_t id) {
pr->setImmutableWithID(id);
}

//#define REC_TRACE SkDebugf
static void REC_TRACE(const char format[], ...) {}

// for diagnostics
static int32_t gRecCounter;

class SkBitmapCache::Rec : public SkResourceCache::Rec {
public:
Rec(const SkBitmapCacheDesc& desc, const SkImageInfo& info, size_t rowBytes,
std::unique_ptr<SkDiscardableMemory> dm, void* block)
: fKey(desc)
, fBitmap(result)
, fDM(std::move(dm))
, fMalloc(block)
, fInfo(info)
, fRowBytes(rowBytes)
, fExternalCounter(kBeforeFirstInstall_ExternalCounter)
{
#ifdef TRACE_NEW_BITMAP_CACHE_RECS
fKey.dump();
#endif
SkASSERT(!(fDM && fMalloc)); // can't have both

// We need an ID to return with the bitmap/pixelref.
// If they are not scaling, we can return the same ID as the key/desc
// If they are scaling, we need a new ID
if (desc.fScaledWidth == 0 && desc.fScaledHeight == 0) {
fPrUniqueID = desc.fImageID;
} else {
fPrUniqueID = SkNextID::ImageID();
}
REC_TRACE(" Rec(%d): [%d %d] %d\n",
sk_atomic_inc(&gRecCounter), fInfo.width(), fInfo.height(), fPrUniqueID);
}

~Rec() override {
SkASSERT(0 == fExternalCounter || kBeforeFirstInstall_ExternalCounter == fExternalCounter);
if (fDM && kBeforeFirstInstall_ExternalCounter == fExternalCounter) {
// we never installed, so we need to unlock before we destroy the DM
SkASSERT(fDM->data());
fDM->unlock();
}
REC_TRACE("~Rec(%d): [%d %d] %d\n",
sk_atomic_dec(&gRecCounter) - 1, fInfo.width(), fInfo.height(), fPrUniqueID);
sk_free(fMalloc); // may be null
}

const Key& getKey() const override { return fKey; }
size_t bytesUsed() const override { return sizeof(fKey) + fBitmap.getSize(); }
size_t bytesUsed() const override {
return sizeof(fKey) + fInfo.getSafeSize(fRowBytes);
}
bool canBePurged() override {
SkAutoMutexAcquire ama(fMutex);
return fExternalCounter == 0;
}
void postAddInstall(void* payload) override {
SkAssertResult(this->install(static_cast<SkBitmap*>(payload)));
}

const char* getCategory() const override { return "bitmap"; }
SkDiscardableMemory* diagnostic_only_getDiscardable() const override {
return fBitmap.pixelRef()->diagnostic_only_getDiscardable();
return fDM.get();
}

static void ReleaseProc(void* addr, void* ctx) {
Rec* rec = static_cast<Rec*>(ctx);
SkAutoMutexAcquire ama(rec->fMutex);

REC_TRACE(" Rec: [%d] releaseproc\n", rec->fPrUniqueID);

SkASSERT(rec->fExternalCounter > 0);
rec->fExternalCounter -= 1;
if (rec->fDM) {
SkASSERT(rec->fMalloc == nullptr);
if (rec->fExternalCounter == 0) {
REC_TRACE(" Rec [%d] unlock\n", rec->fPrUniqueID);
rec->fDM->unlock();
}
} else {
SkASSERT(rec->fMalloc != nullptr);
}
}

bool install(SkBitmap* bitmap) {
SkAutoMutexAcquire ama(fMutex);

// are we still valid
if (!fDM && !fMalloc) {
REC_TRACE(" Rec: [%d] invalid\n", fPrUniqueID);
return false;
}

/*
constructor fExternalCount < 0 fDM->data()
after install fExternalCount > 0 fDM->data()
after Release fExternalCount == 0 !fDM->data()
*/
if (fDM) {
if (kBeforeFirstInstall_ExternalCounter == fExternalCounter) {
SkASSERT(fDM->data());
} else if (fExternalCounter > 0) {
SkASSERT(fDM->data());
} else {
SkASSERT(fExternalCounter == 0);
if (!fDM->lock()) {
REC_TRACE(" Rec [%d] re-lock failed\n", fPrUniqueID);
fDM.reset(nullptr);
return false;
}
REC_TRACE(" Rec [%d] re-lock succeeded\n", fPrUniqueID);
}
SkASSERT(fDM->data());
}

bitmap->installPixels(fInfo, fDM ? fDM->data() : fMalloc, fRowBytes, nullptr,
ReleaseProc, this);
SkBitmapCache_setImmutableWithID(bitmap->pixelRef(), fPrUniqueID);

REC_TRACE(" Rec: [%d] install new pr\n", fPrUniqueID);

if (kBeforeFirstInstall_ExternalCounter == fExternalCounter) {
fExternalCounter = 1;
} else {
fExternalCounter += 1;
}
SkASSERT(fExternalCounter > 0);
return true;
}

static bool Finder(const SkResourceCache::Rec& baseRec, void* contextBitmap) {
const BitmapRec& rec = static_cast<const BitmapRec&>(baseRec);
Rec* rec = (Rec*)&baseRec;
SkBitmap* result = (SkBitmap*)contextBitmap;

*result = rec.fBitmap;
result->lockPixels();
return SkToBool(result->getPixels());
REC_TRACE(" Rec: [%d] found\n", rec->fPrUniqueID);
return rec->install(result);
}

private:
BitmapKey fKey;
SkBitmap fBitmap;

SkMutex fMutex;

// either fDM or fMalloc can be non-null, but not both
std::unique_ptr<SkDiscardableMemory> fDM;
void* fMalloc;

SkImageInfo fInfo;
size_t fRowBytes;
uint32_t fPrUniqueID;

// This field counts the number of external pixelrefs we have created. They notify us when
// they are destroyed so we can decrement this.
//
// > 0 we have outstanding pixelrefs
// == 0 we have no outstanding pixelrefs, and can be safely purged
// < 0 we have been created, but not yet "installed" the first time.
//
int fExternalCounter;

enum {
kBeforeFirstInstall_ExternalCounter = -1
};
};
} // namespace

void SkBitmapCache::PrivateDeleteRec(Rec* rec) { delete rec; }

SkBitmapCache::RecPtr SkBitmapCache::Alloc(const SkBitmapCacheDesc& desc, const SkImageInfo& info,
SkPixmap* pmap) {
// Ensure that the caller is self-consistent:
// - if they are scaling, the info matches the scaled size
// - if they are not, the info matches the subset (i.e. the subset is the entire image)
if (desc.fScaledWidth == 0 && desc.fScaledHeight == 0) {
SkASSERT(info.width() == desc.fSubset.width());
SkASSERT(info.height() == desc.fSubset.height());
} else {
SkASSERT(info.width() == desc.fScaledWidth);
SkASSERT(info.height() == desc.fScaledHeight);
}

const size_t rb = info.minRowBytes();
size_t size = info.getSafeSize(rb);
if (0 == size) {
return nullptr;
}

std::unique_ptr<SkDiscardableMemory> dm;
void* block = nullptr;

auto factory = SkResourceCache::GetDiscardableFactory();
if (factory) {
dm.reset(factory(size));
} else {
block = sk_malloc_flags(size, 0);
}
if (!dm && !block) {
return nullptr;
}
*pmap = SkPixmap(info, dm ? dm->data() : block, rb);
return RecPtr(new Rec(desc, info, rb, std::move(dm), block));
}

void SkBitmapCache::Add(RecPtr rec, SkBitmap* bitmap) {
SkResourceCache::Add(rec.release(), bitmap);
}

bool SkBitmapCache::Find(const SkBitmapCacheDesc& desc, SkBitmap* result) {
desc.validate();
return SkResourceCache::Find(BitmapKey(desc), SkBitmapCache::Rec::Finder, result);
}

//////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////

#define CHECK_LOCAL(localCache, localName, globalName, ...) \
((localCache) ? localCache->localName(__VA_ARGS__) : SkResourceCache::globalName(__VA_ARGS__))

bool SkBitmapCache::Find(const SkBitmapCacheDesc& desc, SkBitmap* result,
SkResourceCache* localCache) {
desc.validate();
return CHECK_LOCAL(localCache, find, Find, BitmapKey(desc), BitmapRec::Finder, result);
}

bool SkBitmapCache::Add(const SkBitmapCacheDesc& desc, const SkBitmap& result,
SkResourceCache* localCache) {
desc.validate();
SkASSERT(result.isImmutable());
BitmapRec* rec = new BitmapRec(desc, result);
CHECK_LOCAL(localCache, add, Add, rec);
return true;
}

//////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////

namespace {
static unsigned gMipMapKeyNamespaceLabel;

@@ -44,24 +44,21 @@ struct SkBitmapCacheDesc {

class SkBitmapCache {
public:
/**
* Use this allocator for bitmaps, so they can use ashmem when available.
* Returns nullptr if the ResourceCache has not been initialized with a DiscardableFactory.
*/
static SkBitmap::Allocator* GetAllocator();

/**
* Search based on the desc. If found, returns true and
* result will be set to the matching bitmap with its pixels already locked.
*/
static bool Find(const SkBitmapCacheDesc&, SkBitmap* result,
SkResourceCache* localCache = nullptr);
static bool Find(const SkBitmapCacheDesc&, SkBitmap* result);

/*
* result must be marked isImmutable()
*/
static bool Add(const SkBitmapCacheDesc&, const SkBitmap& result,
SkResourceCache* localCache = nullptr);
class Rec;
struct RecDeleter { void operator()(Rec* r) { PrivateDeleteRec(r); } };
typedef std::unique_ptr<Rec, RecDeleter> RecPtr;

static RecPtr Alloc(const SkBitmapCacheDesc&, const SkImageInfo&, SkPixmap*);
static void Add(RecPtr, SkBitmap*);

private:
static void PrivateDeleteRec(Rec*);
};

class SkMipMapCache {

@@ -133,21 +133,36 @@ bool SkDefaultBitmapControllerState::processHQRequest(const SkBitmapProvider& pr
if (!orig.requestLock(&src)) {
return false;
}
if (!SkBitmapScaler::Resize(&fResultBitmap, src.pixmap(), kHQ_RESIZE_METHOD,
dstW, dstH, SkResourceCache::GetAllocator())) {

SkPixmap dst;
SkBitmapCache::RecPtr rec;
const SkImageInfo info = SkImageInfo::MakeN32(desc.fScaledWidth, desc.fScaledHeight,
src.pixmap().alphaType());
if (provider.isVolatile()) {
if (!fResultBitmap.tryAllocPixels(info)) {
return false;
}
SkASSERT(fResultBitmap.getPixels());
fResultBitmap.peekPixels(&dst);
fResultBitmap.setImmutable(); // a little cheat, as we haven't resized yet, but ok
} else {
rec = SkBitmapCache::Alloc(desc, info, &dst);
if (!rec) {
return false;
}
}
if (!SkBitmapScaler::Resize(dst, src.pixmap(), kHQ_RESIZE_METHOD)) {
return false; // we failed to create fScaledBitmap
}

SkASSERT(fResultBitmap.getPixels());
fResultBitmap.setImmutable();
if (!provider.isVolatile()) {
if (SkBitmapCache::Add(desc, fResultBitmap)) {
provider.notifyAddedToCache();
}
if (rec) {
SkBitmapCache::Add(std::move(rec), &fResultBitmap);
SkASSERT(fResultBitmap.getPixels());
provider.notifyAddedToCache();
}
}

SkASSERT(fResultBitmap.getPixels());
SkASSERT(fResultBitmap.isImmutable());

fInvMatrix.postScale(SkIntToScalar(dstW) / provider.width(),
SkIntToScalar(dstH) / provider.height());

@@ -108,17 +108,25 @@ SkImageCacherator::SkImageCacherator(Validator* validator)
, fInfo(validator->fInfo)
, fOrigin(validator->fOrigin)
{
fUniqueIDs[kLegacy_CachedFormat] = validator->fUniqueID;
for (int i = 1; i < kNumCachedFormats; ++i) {
// We lazily allocate IDs for non-default caching cases
fUniqueIDs[i] = kNeedNewImageUniqueID;
}
SkASSERT(fSharedGenerator);
SkASSERT(fInfo.colorType() != kIndex_8_SkColorType);
// We explicit set the legacy format slot, but leave the others uninitialized (via SkOnce)
// and only resolove them to IDs as needed (by calling getUniqueID()).
fIDRecs[kLegacy_CachedFormat].fOnce([this, validator] {
fIDRecs[kLegacy_CachedFormat].fUniqueID = validator->fUniqueID;
});
}

SkImageCacherator::~SkImageCacherator() {}

uint32_t SkImageCacherator::getUniqueID(CachedFormat format) const {
IDRec* rec = &fIDRecs[format];
rec->fOnce([rec] {
rec->fUniqueID = SkNextID::ImageID();
});
return rec->fUniqueID;
}

SkData* SkImageCacherator::refEncoded(GrContext* ctx) {
ScopedGenerator generator(fSharedGenerator);
return generator->refEncodedData(ctx);

@@ -131,61 +139,6 @@ static bool check_output_bitmap(const SkBitmap& bitmap, uint32_t expectedID) {
return true;
}

static bool reset_and_return_false(SkBitmap* bitmap) {
bitmap->reset();
return false;
}

static bool try_generate_bitmap(SkImageGenerator* gen, SkBitmap* bitmap, const SkImageInfo& info,
SkBitmap::Allocator* allocator) {
SkASSERT(info.colorType() != kIndex_8_SkColorType);
if (0 == info.getSafeSize(info.minRowBytes())) {
return false;
}
if (!bitmap->setInfo(info)) {
return reset_and_return_false(bitmap);
}
if (!bitmap->tryAllocPixels(allocator, nullptr)) {
return reset_and_return_false(bitmap);
}
SkASSERT(bitmap->getPixels()); // we're already locked

if (!gen->getPixels(bitmap->info(), bitmap->getPixels(), bitmap->rowBytes())) {
return reset_and_return_false(bitmap);
}
return true;
}

// Note, this returns a new, mutable, bitmap, with a new genID.
// If you want the immutable bitmap with the same ID as our cacherator, call tryLockAsBitmap()
//
bool SkImageCacherator::generateBitmap(SkBitmap* bitmap, const SkImageInfo& decodeInfo) {
SkBitmap::Allocator* allocator = SkResourceCache::GetAllocator();

ScopedGenerator generator(fSharedGenerator);
const SkImageInfo& genInfo = generator->getInfo();
if (decodeInfo.dimensions() == genInfo.dimensions()) {
SkASSERT(fOrigin.x() == 0 && fOrigin.y() == 0);
// fast-case, no copy needed
return try_generate_bitmap(generator, bitmap, decodeInfo, allocator);
} else {
// need to handle subsetting, so we first generate the full size version, and then
// "read" from it to get our subset. See https://bug.skia.org/4213

SkBitmap full;
if (!try_generate_bitmap(generator, &full,
decodeInfo.makeWH(genInfo.width(), genInfo.height()), allocator)) {
return false;
}
SkASSERT(decodeInfo.colorType() != kIndex_8_SkColorType);
if (!bitmap->tryAllocPixels(decodeInfo)) {
return false;
}
return full.readPixels(bitmap->info(), bitmap->getPixels(), bitmap->rowBytes(),
fOrigin.x(), fOrigin.y());
}
}

bool SkImageCacherator::directGeneratePixels(const SkImageInfo& info, void* pixels, size_t rb,
int srcX, int srcY,
SkTransferFunctionBehavior behavior) {

@@ -204,10 +157,46 @@ bool SkImageCacherator::directGeneratePixels(const SkImageInfo& info, void* pixe
//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImageCacherator::lockAsBitmapOnlyIfAlreadyCached(SkBitmap* bitmap, CachedFormat format) {
return kNeedNewImageUniqueID != fUniqueIDs[format] &&
SkBitmapCache::Find(SkBitmapCacheDesc::Make(fUniqueIDs[format],
fInfo.width(), fInfo.height()), bitmap) &&
check_output_bitmap(*bitmap, fUniqueIDs[format]);
uint32_t uniqueID = this->getUniqueID(format);
return SkBitmapCache::Find(SkBitmapCacheDesc::Make(uniqueID,
fInfo.width(), fInfo.height()), bitmap) &&
check_output_bitmap(*bitmap, uniqueID);
}

static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
const int genW = gen->getInfo().width();
const int genH = gen->getInfo().height();
const SkIRect srcR = SkIRect::MakeWH(genW, genH);
const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
if (!srcR.contains(dstR)) {
return false;
}

// If they are requesting a subset, we have to have a temp allocation for full image, and
// then copy the subset into their allocation
SkBitmap full;
SkPixmap fullPM;
const SkPixmap* dstPM = &pmap;
if (srcR != dstR) {
if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
return false;
}
if (!full.peekPixels(&fullPM)) {
return false;
}
dstPM = &fullPM;
}

if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
return false;
}

if (srcR != dstR) {
if (!full.readPixels(pmap, originX, originY)) {
return false;
}
}
return true;
}

bool SkImageCacherator::tryLockAsBitmap(SkBitmap* bitmap, const SkImage* client,

@@ -216,20 +205,44 @@ bool SkImageCacherator::tryLockAsBitmap(SkBitmap* bitmap, const SkImage* client,
if (this->lockAsBitmapOnlyIfAlreadyCached(bitmap, format)) {
return true;
}
if (!this->generateBitmap(bitmap, info)) {

uint32_t uniqueID = this->getUniqueID(format);

SkBitmap tmpBitmap;
SkBitmapCache::RecPtr cacheRec;
SkPixmap pmap;
if (SkImage::kAllow_CachingHint == chint) {
auto desc = SkBitmapCacheDesc::Make(uniqueID, info.width(), info.height());
cacheRec = SkBitmapCache::Alloc(desc, info, &pmap);
if (!cacheRec) {
return false;
}
} else {
if (!tmpBitmap.tryAllocPixels(info)) {
return false;
}
if (!tmpBitmap.peekPixels(&pmap)) {
return false;
}
}

ScopedGenerator generator(fSharedGenerator);
if (!generate_pixels(generator, pmap, fOrigin.x(), fOrigin.y())) {
return false;
}

if (kNeedNewImageUniqueID == fUniqueIDs[format]) {
fUniqueIDs[format] = SkNextID::ImageID();
}
bitmap->pixelRef()->setImmutableWithID(fUniqueIDs[format]);
if (SkImage::kAllow_CachingHint == chint) {
SkBitmapCache::Add(SkBitmapCacheDesc::Make(fUniqueIDs[format],
fInfo.width(), fInfo.height()), *bitmap);
if (cacheRec) {
SkBitmapCache::Add(std::move(cacheRec), bitmap);
SkASSERT(bitmap->getPixels()); // we're locked
SkASSERT(bitmap->isImmutable());
SkASSERT(bitmap->getGenerationID() == uniqueID);
if (client) {
as_IB(client)->notifyAddedToCache();
}
} else {
*bitmap = tmpBitmap;
bitmap->lockPixels();
bitmap->pixelRef()->setImmutableWithID(uniqueID);
}
return true;
}

@@ -239,13 +252,10 @@ bool SkImageCacherator::lockAsBitmap(GrContext* context, SkBitmap* bitmap, const
SkImage::CachingHint chint) {
CachedFormat format = this->chooseCacheFormat(dstColorSpace);
SkImageInfo cacheInfo = this->buildCacheInfo(format);

if (kNeedNewImageUniqueID == fUniqueIDs[format]) {
fUniqueIDs[format] = SkNextID::ImageID();
}
const uint32_t uniqueID = this->getUniqueID(format);

if (this->tryLockAsBitmap(bitmap, client, chint, format, cacheInfo)) {
return check_output_bitmap(*bitmap, fUniqueIDs[format]);
return check_output_bitmap(*bitmap, uniqueID);
}

#if SK_SUPPORT_GPU

@@ -266,9 +276,20 @@ bool SkImageCacherator::lockAsBitmap(GrContext* context, SkBitmap* bitmap, const
return false;
}

if (!bitmap->tryAllocPixels(cacheInfo)) {
bitmap->reset();
return false;
const auto desc = SkBitmapCacheDesc::Make(uniqueID, fInfo.width(), fInfo.height());
SkBitmapCache::RecPtr rec;
SkPixmap pmap;
if (SkImage::kAllow_CachingHint == chint) {
rec = SkBitmapCache::Alloc(desc, cacheInfo, &pmap);
if (!rec) {
bitmap->reset();
return false;
}
} else {
if (!bitmap->tryAllocPixels(cacheInfo)) {
bitmap->reset();
return false;
}
}

sk_sp<GrSurfaceContext> sContext(context->contextPriv().makeWrappedSurfaceContext(

@@ -279,20 +300,18 @@ bool SkImageCacherator::lockAsBitmap(GrContext* context, SkBitmap* bitmap, const
return false;
}

if (!sContext->readPixels(bitmap->info(), bitmap->getPixels(), bitmap->rowBytes(), 0, 0)) {
if (!sContext->readPixels(pmap.info(), pmap.writable_addr(), pmap.rowBytes(), 0, 0)) {
bitmap->reset();
return false;
}

bitmap->pixelRef()->setImmutableWithID(fUniqueIDs[format]);
if (SkImage::kAllow_CachingHint == chint) {
SkBitmapCache::Add(SkBitmapCacheDesc::Make(fUniqueIDs[format],
fInfo.width(), fInfo.height()), *bitmap);
if (rec) {
SkBitmapCache::Add(std::move(rec), bitmap);
if (client) {
as_IB(client)->notifyAddedToCache();
}
}
return check_output_bitmap(*bitmap, fUniqueIDs[format]);
return check_output_bitmap(*bitmap, uniqueID);
#else
return false;
#endif

@@ -31,7 +31,7 @@ public:
~SkImageCacherator();

const SkImageInfo& info() const { return fInfo; }
uint32_t uniqueID() const { return fUniqueIDs[kLegacy_CachedFormat]; }
uint32_t uniqueID() const { return this->getUniqueID(kLegacy_CachedFormat); }

enum CachedFormat {
kLegacy_CachedFormat, // The format from the generator, with any color space stripped out

@@ -130,7 +130,6 @@ private:
CachedFormat chooseCacheFormat(SkColorSpace* dstColorSpace, const GrCaps* = nullptr);
SkImageInfo buildCacheInfo(CachedFormat);

bool generateBitmap(SkBitmap*, const SkImageInfo&);
bool tryLockAsBitmap(SkBitmap*, const SkImage*, SkImage::CachingHint, CachedFormat,
const SkImageInfo&);
#if SK_SUPPORT_GPU

@@ -152,7 +151,14 @@ private:
sk_sp<SharedGenerator> fSharedGenerator;
const SkImageInfo fInfo;
const SkIPoint fOrigin;
uint32_t fUniqueIDs[kNumCachedFormats];

struct IDRec {
SkOnce fOnce;
uint32_t fUniqueID;
};
mutable IDRec fIDRecs[kNumCachedFormats];

uint32_t getUniqueID(CachedFormat) const;

friend class GrImageTextureMaker;
friend class SkImage;

@@ -5,11 +5,11 @@
* found in the LICENSE file.
*/

#include "SkDiscardableMemory.h"
#include "SkMessageBus.h"
#include "SkMipMap.h"
#include "SkMutex.h"
#include "SkOpts.h"
#include "SkPixelRef.h"
#include "SkResourceCache.h"
#include "SkTraceMemoryDump.h"

@@ -75,140 +75,15 @@ void SkResourceCache::init() {
fTotalBytesUsed = 0;
fCount = 0;
fSingleAllocationByteLimit = 0;
fAllocator = nullptr;

// One of these should be explicit set by the caller after we return.
fTotalByteLimit = 0;
fDiscardableFactory = nullptr;
}

#include "SkDiscardableMemory.h"

class SkOneShotDiscardablePixelRef : public SkPixelRef {
public:

// Ownership of the discardablememory is transfered to the pixelref
// The pixelref will ref() the colortable (if not NULL), and unref() in destructor
SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes,
SkColorTable*);
~SkOneShotDiscardablePixelRef() override;

protected:
bool onNewLockPixels(LockRec*) override;
void onUnlockPixels() override;
size_t getAllocatedSizeInBytes() const override;

SkDiscardableMemory* diagnostic_only_getDiscardable() const override { return fDM; }

private:
SkDiscardableMemory* fDM;
size_t fRB;
bool fFirstTime;
SkColorTable* fCTable;

typedef SkPixelRef INHERITED;
};

SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
SkDiscardableMemory* dm,
size_t rowBytes,
SkColorTable* ctable)
: INHERITED(info)
, fDM(dm)
, fRB(rowBytes)
, fCTable(ctable)
{
SkASSERT(dm->data());
fFirstTime = true;
SkSafeRef(ctable);
}

SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
delete fDM;
SkSafeUnref(fCTable);
}

bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) {
if (fFirstTime) {
// we're already locked
SkASSERT(fDM->data());
fFirstTime = false;
goto SUCCESS;
}

// A previous call to onUnlock may have deleted our DM, so check for that
if (nullptr == fDM) {
return false;
}

if (!fDM->lock()) {
// since it failed, we delete it now, to free-up the resource
delete fDM;
fDM = nullptr;
return false;
}

SUCCESS:
rec->fPixels = fDM->data();
rec->fColorTable = fCTable;
rec->fRowBytes = fRB;
return true;
}

void SkOneShotDiscardablePixelRef::onUnlockPixels() {
SkASSERT(!fFirstTime);
fDM->unlock();
}

size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
return this->info().getSafeSize(fRB);
}

class SkResourceCacheDiscardableAllocator : public SkBitmap::Allocator {
public:
SkResourceCacheDiscardableAllocator(SkResourceCache::DiscardableFactory factory) {
SkASSERT(factory);
fFactory = factory;
}

bool allocPixelRef(SkBitmap*, SkColorTable*) override;

private:
SkResourceCache::DiscardableFactory fFactory;
};

bool SkResourceCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap, SkColorTable* ctable) {
size_t size = bitmap->getSize();
uint64_t size64 = bitmap->computeSize64();
if (0 == size || size64 > (uint64_t)size) {
return false;
}

if (kIndex_8_SkColorType == bitmap->colorType()) {
if (!ctable) {
return false;
}
} else {
ctable = nullptr;
}

SkDiscardableMemory* dm = fFactory(size);
if (nullptr == dm) {
return false;
}

SkImageInfo info = bitmap->info();
bitmap->setPixelRef(
sk_make_sp<SkOneShotDiscardablePixelRef>(info, dm, bitmap->rowBytes(), ctable), 0, 0);
bitmap->lockPixels();
return bitmap->readyToDraw();
}

SkResourceCache::SkResourceCache(DiscardableFactory factory) {
this->init();
fDiscardableFactory = factory;

fAllocator = new SkResourceCacheDiscardableAllocator(factory);
}

SkResourceCache::SkResourceCache(size_t byteLimit) {

@@ -217,8 +92,6 @@ SkResourceCache::SkResourceCache(size_t byteLimit) {
}

SkResourceCache::~SkResourceCache() {
SkSafeUnref(fAllocator);

Rec* rec = fHead;
while (rec) {
Rec* next = rec->fNext;

@@ -258,18 +131,27 @@ static void make_size_str(size_t size, SkString* str) {

static bool gDumpCacheTransactions;

void SkResourceCache::add(Rec* rec) {
void SkResourceCache::add(Rec* rec, void* payload) {
this->checkMessages();

SkASSERT(rec);
// See if we already have this key (racy inserts, etc.)
if (nullptr != fHash->find(rec->getKey())) {
delete rec;
return;
if (Rec** preexisting = fHash->find(rec->getKey())) {
Rec* prev = *preexisting;
if (prev->canBePurged()) {
// if it can be purged, the install may fail, so we have to remove it
this->remove(prev);
} else {
// if it cannot be purged, we reuse it and delete the new one
prev->postAddInstall(payload);
delete rec;
return;
}
}

this->addToHead(rec);
fHash->set(rec);
rec->postAddInstall(payload);

if (gDumpCacheTransactions) {
SkString bytesStr, totalStr;

@@ -284,6 +166,7 @@ void SkResourceCache::add(Rec* rec) {
}

void SkResourceCache::remove(Rec* rec) {
SkASSERT(rec->canBePurged());
size_t used = rec->bytesUsed();
SkASSERT(used <= fTotalBytesUsed);

@@ -293,6 +176,8 @@ void SkResourceCache::remove(Rec* rec) {
fTotalBytesUsed -= used;
fCount -= 1;

//SkDebugf("-RC count [%3d] bytes %d\n", fCount, fTotalBytesUsed);

if (gDumpCacheTransactions) {
SkString bytesStr, totalStr;
make_size_str(used, &bytesStr);

@@ -323,7 +208,9 @@ void SkResourceCache::purgeAsNeeded(bool forcePurge) {
}

Rec* prev = rec->fPrev;
this->remove(rec);
if (rec->canBePurged()) {
this->remove(rec);
}
rec = prev;
}
}

@@ -350,8 +237,11 @@ void SkResourceCache::purgeSharedID(uint64_t sharedID) {
while (rec) {
Rec* prev = rec->fPrev;
if (rec->getKey().getSharedID() == sharedID) {
// SkDebugf("purgeSharedID id=%llx rec=%p\n", sharedID, rec);
this->remove(rec);
// even though the "src" is now dead, caches could still be in-flight, so
// we have to check if it can be removed.
if (rec->canBePurged()) {
this->remove(rec);
}
#ifdef SK_TRACK_PURGE_SHAREDID_HITRATE
found = true;
#endif

@@ -587,11 +477,6 @@ SkResourceCache::DiscardableFactory SkResourceCache::GetDiscardableFactory() {
return get_cache()->discardableFactory();
}

SkBitmap::Allocator* SkResourceCache::GetAllocator() {
SkAutoMutexAcquire am(gMutex);
return get_cache()->allocator();
}

SkCachedData* SkResourceCache::NewCachedData(size_t bytes) {
SkAutoMutexAcquire am(gMutex);
return get_cache()->newCachedData(bytes);

@@ -627,9 +512,9 @@ bool SkResourceCache::Find(const Key& key, FindVisitor visitor, void* context) {
return get_cache()->find(key, visitor, context);
}

void SkResourceCache::Add(Rec* rec) {
void SkResourceCache::Add(Rec* rec, void* payload) {
SkAutoMutexAcquire am(gMutex);
get_cache()->add(rec);
get_cache()->add(rec, payload);
}

void SkResourceCache::VisitAll(Visitor visitor, void* context) {

@@ -82,6 +82,22 @@ public:
virtual const Key& getKey() const = 0;
virtual size_t bytesUsed() const = 0;

// Called if the cache needs to purge/remove/delete the Rec. Default returns true.
// Subclass may return false if there are outstanding references to it (e.g. bitmaps).
// Will only be deleted/removed-from-the-cache when this returns true.
virtual bool canBePurged() { return true; }

// A rec is first created/initialized, and then added to the cache. As part of the add(),
// the cache will callback into the rec with postAddInstall, passing in whatever payload
// was passed to add/Add.
//
// This late-install callback exists because the process of add-ing might end up deleting
// the new rec (if an existing rec in the cache has the same key and cannot be purged).
// If the new rec will be deleted during add, the pre-existing one (with the same key)
// will have postAddInstall() called on it instead, so that either way an "install" will
// happen during the add.
virtual void postAddInstall(void*) {}

// for memory usage diagnostics
virtual const char* getCategory() const = 0;
virtual SkDiscardableMemory* diagnostic_only_getDiscardable() const { return nullptr; }

@@ -135,7 +151,7 @@ public:
* false : Rec is "stale" -- the cache will purge it.
*/
static bool Find(const Key& key, FindVisitor, void* context);
static void Add(Rec*);
static void Add(Rec*, void* payload = nullptr);

typedef void (*Visitor)(const Rec&, void* context);
// Call the visitor for every Rec in the cache.

@@ -163,12 +179,6 @@ public:
*/
static DiscardableFactory GetDiscardableFactory();

/**
* Use this allocator for bitmaps, so they can use ashmem when available.
* Returns nullptr if the ResourceCache has not been initialized with a DiscardableFactory.
*/
static SkBitmap::Allocator* GetAllocator();

static SkCachedData* NewCachedData(size_t bytes);

static void PostPurgeSharedID(uint64_t sharedID);

@@ -208,7 +218,7 @@ public:
* false : Rec is "stale" -- the cache will purge it.
*/
bool find(const Key&, FindVisitor, void* context);
void add(Rec*);
void add(Rec*, void* payload = nullptr);
void visitAll(Visitor, void* context);

size_t getTotalBytesUsed() const { return fTotalBytesUsed; }

@@ -239,7 +249,6 @@ public:
}

DiscardableFactory discardableFactory() const { return fDiscardableFactory; }
SkBitmap::Allocator* allocator() const { return fAllocator; }

SkCachedData* newCachedData(size_t bytes);

@@ -256,8 +265,6 @@ private:
Hash* fHash;

DiscardableFactory fDiscardableFactory;
// the allocator is nullptr or one that matches discardables
SkBitmap::Allocator* fAllocator;

size_t fTotalBytesUsed;
size_t fTotalByteLimit;

@@ -410,10 +410,11 @@ public:
return true;
}

SkPixmap pmap;
SkImageInfo info = SkImageInfo::MakeN32(this->width(), this->height(),
this->alphaType(), fColorSpace);

if (!dst->tryAllocPixels(info)) {
auto rec = SkBitmapCache::Alloc(desc, info, &pmap);
if (!rec) {
return false;
}

@@ -423,12 +424,11 @@ public:
return false;
}

if (!sContext->readPixels(info, dst->getPixels(), dst->rowBytes(), 0, 0)) {
if (!sContext->readPixels(info, pmap.writable_addr(), pmap.rowBytes(), 0, 0)) {
return false;
}

dst->pixelRef()->setImmutableWithID(this->uniqueID());
SkBitmapCache::Add(desc, *dst);
SkBitmapCache::Add(std::move(rec), dst);
fAddedRasterVersionToCache.store(true);
return true;
}

@@ -77,8 +77,17 @@ bool SkImage_Gpu::getROPixels(SkBitmap* dst, SkColorSpace* dstColorSpace, Cachin

SkImageInfo ii = make_info(this->width(), this->height(), this->alphaType(),
sk_ref_sp(dstColorSpace));
if (!dst->tryAllocPixels(ii)) {
return false;
SkBitmapCache::RecPtr rec = nullptr;
SkPixmap pmap;
if (kAllow_CachingHint == chint) {
rec = SkBitmapCache::Alloc(desc, ii, &pmap);
if (!rec) {
return false;
}
} else {
if (!dst->tryAllocPixels(ii)) {
return false;
}
}

sk_sp<GrSurfaceContext> sContext = fContext->contextPriv().makeWrappedSurfaceContext(

@@ -88,13 +97,12 @@ bool SkImage_Gpu::getROPixels(SkBitmap* dst, SkColorSpace* dstColorSpace, Cachin
return false;
}

if (!sContext->readPixels(dst->info(), dst->getPixels(), dst->rowBytes(), 0, 0)) {
if (!sContext->readPixels(pmap.info(), pmap.writable_addr(), pmap.rowBytes(), 0, 0)) {
return false;
}

dst->pixelRef()->setImmutableWithID(this->uniqueID());
if (kAllow_CachingHint == chint) {
SkBitmapCache::Add(desc, *dst);
if (rec) {
SkBitmapCache::Add(std::move(rec), dst);
fAddedRasterVersionToCache.store(true);
}
return true;

@@ -10,6 +10,7 @@
#include "SkCanvas.h"
#include "SkDiscardableMemoryPool.h"
#include "SkGraphics.h"
#include "SkMakeUnique.h"
#include "SkMipMap.h"
#include "SkPicture.h"
#include "SkPictureRecorder.h"

@@ -201,3 +202,88 @@ DEF_TEST(BitmapCache_discarded_image, reporter) {
});
}
}

///////////////////////////////////////////////////////////////////////////////////////////////////

static void* gTestNamespace;

struct TestKey : SkResourceCache::Key {
int32_t fData;

TestKey(int sharedID, int32_t data) : fData(data) {
this->init(&gTestNamespace, sharedID, sizeof(fData));
}
};

struct TestRec : SkResourceCache::Rec {
enum {
kDidInstall = 1 << 0,
};

TestKey fKey;
int* fFlags;
bool fCanBePurged;

TestRec(int sharedID, int32_t data, int* flagPtr) : fKey(sharedID, data), fFlags(flagPtr) {
fCanBePurged = false;
}

const Key& getKey() const override { return fKey; }
size_t bytesUsed() const override { return 1024; /* just need a value */ }
bool canBePurged() override { return fCanBePurged; }
void postAddInstall(void*) override {
*fFlags |= kDidInstall;
}
const char* getCategory() const override { return "test-category"; }
};

static void test_duplicate_add(SkResourceCache* cache, skiatest::Reporter* reporter,
bool purgable) {
int sharedID = 1;
int data = 0;

int flags0 = 0, flags1 = 0;

auto rec0 = skstd::make_unique<TestRec>(sharedID, data, &flags0);
auto rec1 = skstd::make_unique<TestRec>(sharedID, data, &flags1);
SkASSERT(rec0->getKey() == rec1->getKey());

TestRec* r0 = rec0.get(); // save the bare-pointer since we will release rec0
r0->fCanBePurged = purgable;

REPORTER_ASSERT(reporter, !(flags0 & TestRec::kDidInstall));
REPORTER_ASSERT(reporter, !(flags1 & TestRec::kDidInstall));

cache->add(rec0.release(), nullptr);
REPORTER_ASSERT(reporter, flags0 & TestRec::kDidInstall);
REPORTER_ASSERT(reporter, !(flags1 & TestRec::kDidInstall));
flags0 = 0; // reset the flag

cache->add(rec1.release(), nullptr);
if (purgable) {
// we purged rec0, and did install rec1
REPORTER_ASSERT(reporter, !(flags0 & TestRec::kDidInstall));
REPORTER_ASSERT(reporter, flags1 & TestRec::kDidInstall);
} else {
// we re-used rec0 and did not install rec1
REPORTER_ASSERT(reporter, flags0 & TestRec::kDidInstall);
REPORTER_ASSERT(reporter, !(flags1 & TestRec::kDidInstall));
r0->fCanBePurged = true; // so we can cleanup the cache
}
}

/*
* Test behavior when the same key is added more than once.
*/
DEF_TEST(ResourceCache_purge, reporter) {
for (bool purgable : { false, true }) {
{
SkResourceCache cache(1024 * 1024);
test_duplicate_add(&cache, reporter, purgable);
}
{
SkResourceCache cache(SkDiscardableMemory::Create);
test_duplicate_add(&cache, reporter, purgable);
}
}
}