Use the SkBitmapHeap to handle SkBitmaps in SkGPipe's cross-process mode.

This required moving the LRU handles from SkBitmapHeapEntry to LookupEntry.

It also allows the drawBitmap* calls in SkGPipeCanvas to share a single commonDrawBitmap path.

Review URL: https://codereview.appspot.com/6460073

git-svn-id: http://skia.googlecode.com/svn/trunk@5063 2bbb7eff-a529-9590-31e7-b0007b416f81
Author: scroggo@google.com
Date:   2012-08-13 16:39:42 +00:00
Commit: 92967e9677
Parent: b8bf9ce103
4 changed files with 218 additions and 190 deletions
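
The diffs below move the intrusive LRU links off of SkBitmapHeapEntry and onto the heap's lookup records. For orientation, here is a minimal C++ sketch of that style of intrusive doubly-linked LRU bookkeeping; LruNode and LruList are hypothetical stand-ins rather than the Skia types, and the two operations only mirror the shape of removeFromLRU and appendToLRU in the first file below.

#include <cassert>

// Hypothetical stand-ins for SkBitmapHeap::LookupEntry and the heap's
// fMostRecentlyUsed/fLeastRecentlyUsed pointers; not the Skia declarations.
struct LruNode {
    LruNode* fMoreRecentlyUsed = nullptr;
    LruNode* fLessRecentlyUsed = nullptr;
};

struct LruList {
    LruNode* fMostRecentlyUsed  = nullptr;   // head (evicted last)
    LruNode* fLeastRecentlyUsed = nullptr;   // tail (evicted first)

    // Unlink a node and patch its neighbors, like removeFromLRU.
    void remove(LruNode* entry) {
        if (fMostRecentlyUsed == entry) {
            fMostRecentlyUsed = entry->fLessRecentlyUsed;
            if (fMostRecentlyUsed == nullptr) {
                assert(fLeastRecentlyUsed == entry);   // entry was the only node
                fLeastRecentlyUsed = nullptr;
            } else {
                fMostRecentlyUsed->fMoreRecentlyUsed = nullptr;
            }
        } else {
            // Not the head, so it must have a more recently used neighbor.
            assert(entry->fMoreRecentlyUsed != nullptr);
            if (fLeastRecentlyUsed == entry) {
                fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
            }
            entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;
            if (entry->fLessRecentlyUsed != nullptr) {
                entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
            }
        }
        entry->fMoreRecentlyUsed = nullptr;
        entry->fLessRecentlyUsed = nullptr;
    }

    // Link a node in as the most recently used, like appendToLRU. The node
    // must not currently be in the list (call remove first if it is).
    void append(LruNode* entry) {
        if (fMostRecentlyUsed != nullptr) {
            fMostRecentlyUsed->fMoreRecentlyUsed = entry;
            entry->fLessRecentlyUsed = fMostRecentlyUsed;
        } else {
            fLeastRecentlyUsed = entry;
        }
        fMostRecentlyUsed = entry;
    }
};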

View File

@@ -15,9 +15,7 @@
 SkBitmapHeapEntry::SkBitmapHeapEntry()
     : fSlot(-1)
     , fRefCount(0)
-    , fBytesAllocated(0)
-    , fMoreRecentlyUsed(NULL)
-    , fLessRecentlyUsed(NULL) {
+    , fBytesAllocated(0) {
 }
 
 SkBitmapHeapEntry::~SkBitmapHeapEntry() {
@@ -37,6 +35,30 @@ void SkBitmapHeapEntry::addReferences(int count) {
 
 ///////////////////////////////////////////////////////////////////////////////
 
+int SkBitmapHeap::LookupEntry::Compare(const SkBitmapHeap::LookupEntry *a,
+                                       const SkBitmapHeap::LookupEntry *b) {
+    if (a->fGenerationId < b->fGenerationId) {
+        return -1;
+    } else if (a->fGenerationId > b->fGenerationId) {
+        return 1;
+    } else if (a->fPixelOffset < b->fPixelOffset) {
+        return -1;
+    } else if (a->fPixelOffset > b->fPixelOffset) {
+        return 1;
+    } else if (a->fWidth < b->fWidth) {
+        return -1;
+    } else if (a->fWidth > b->fWidth) {
+        return 1;
+    } else if (a->fHeight < b->fHeight) {
+        return -1;
+    } else if (a->fHeight > b->fHeight) {
+        return 1;
+    }
+    return 0;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
 SkBitmapHeap::SkBitmapHeap(int32_t preferredSize, int32_t ownerCount)
     : INHERITED()
     , fExternalStorage(NULL)
@@ -92,27 +114,35 @@ SkTRefArray<SkBitmap>* SkBitmapHeap::extractBitmaps() const {
     return array;
 }
 
-// We just "used" the entry. Update our LRU accordingly
-void SkBitmapHeap::setMostRecentlyUsed(SkBitmapHeapEntry* entry) {
-    SkASSERT(entry != NULL);
-    if (entry == fMostRecentlyUsed) {
-        return;
-    }
-    // Remove info from its prior place, and make sure to cover the hole.
-    if (fLeastRecentlyUsed == entry) {
-        SkASSERT(entry->fMoreRecentlyUsed != NULL);
-        fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
-    }
-    if (entry->fMoreRecentlyUsed != NULL) {
-        SkASSERT(fMostRecentlyUsed != entry);
-        entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;
-    }
-    if (entry->fLessRecentlyUsed != NULL) {
-        SkASSERT(fLeastRecentlyUsed != entry);
-        entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
-    }
-    entry->fMoreRecentlyUsed = NULL;
-
-    // Set up the head and tail pointers properly.
+void SkBitmapHeap::removeFromLRU(SkBitmapHeap::LookupEntry* entry) {
+    if (fMostRecentlyUsed == entry) {
+        fMostRecentlyUsed = entry->fLessRecentlyUsed;
+        if (NULL == fMostRecentlyUsed) {
+            SkASSERT(fLeastRecentlyUsed == entry);
+            fLeastRecentlyUsed = NULL;
+        } else {
+            fMostRecentlyUsed->fMoreRecentlyUsed = NULL;
+        }
+    } else {
+        // Remove entry from its prior place, and make sure to cover the hole.
+        if (fLeastRecentlyUsed == entry) {
+            SkASSERT(entry->fMoreRecentlyUsed != NULL);
+            fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
+        }
+        // Since we have already considered the case where entry is the most recently used, it must
+        // have a more recently used at this point.
+        SkASSERT(entry->fMoreRecentlyUsed != NULL);
+        entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;
+
+        if (entry->fLessRecentlyUsed != NULL) {
+            SkASSERT(fLeastRecentlyUsed != entry);
+            entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
+        }
+    }
+    entry->fMoreRecentlyUsed = NULL;
+}
+
+void SkBitmapHeap::appendToLRU(SkBitmapHeap::LookupEntry* entry) {
     if (fMostRecentlyUsed != NULL) {
         SkASSERT(NULL == fMostRecentlyUsed->fMoreRecentlyUsed);
         fMostRecentlyUsed->fMoreRecentlyUsed = entry;
@@ -125,19 +155,20 @@ void SkBitmapHeap::setMostRecentlyUsed(SkBitmapHeapEntry* entry) {
 }
 
 // iterate through our LRU cache and try to find an entry to evict
-SkBitmapHeapEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
+SkBitmapHeap::LookupEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
     SkASSERT(fPreferredCount != UNLIMITED_SIZE);
     SkASSERT(fStorage.count() >= fPreferredCount);
-    SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
+    SkBitmapHeap::LookupEntry* iter = fLeastRecentlyUsed;
     while (iter != NULL) {
-        if (iter->fRefCount > 0) {
+        SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
+        if (heapEntry->fRefCount > 0) {
             // If the least recently used bitmap has not been unreferenced
             // by its owner, then according to our LRU specifications a more
             // recently used one can not have used all it's references yet either.
             return NULL;
         }
-        if (replacement.pixelRef() && replacement.pixelRef() == iter->fBitmap.pixelRef()) {
+        if (replacement.getGenerationID() == iter->fGenerationId) {
             // Do not replace a bitmap with a new one using the same
             // pixel ref. Instead look for a different one that will
             // potentially free up more space.
@@ -153,21 +184,22 @@ size_t SkBitmapHeap::freeMemoryIfPossible(size_t bytesToFree) {
     if (UNLIMITED_SIZE == fPreferredCount) {
         return 0;
     }
-    SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
+    LookupEntry* iter = fLeastRecentlyUsed;
     size_t origBytesAllocated = fBytesAllocated;
     // Purge starting from LRU until a non-evictable bitmap is found or until
     // everything is evicted.
-    while (iter && 0 == iter->fRefCount) {
-        SkBitmapHeapEntry* next = iter->fMoreRecentlyUsed;
-        this->removeEntryFromLookupTable(*iter);
+    while (iter != NULL) {
+        SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
+        if (heapEntry->fRefCount > 0) {
+            break;
+        }
+        LookupEntry* next = iter->fMoreRecentlyUsed;
+        this->removeEntryFromLookupTable(iter);
         // Free the pixel memory. removeEntryFromLookupTable already reduced
         // fBytesAllocated properly.
-        iter->fBitmap.reset();
+        heapEntry->fBitmap.reset();
         // Add to list of unused slots which can be reused in the future.
-        fUnusedSlots.push(iter->fSlot);
-        // Remove its LRU pointers, so that it does not pretend it is already in
-        // the list the next time it is used.
-        iter->fMoreRecentlyUsed = iter->fLessRecentlyUsed = NULL;
+        fUnusedSlots.push(heapEntry->fSlot);
         iter = next;
         if (origBytesAllocated - fBytesAllocated >= bytesToFree) {
             break;
@@ -193,17 +225,17 @@ size_t SkBitmapHeap::freeMemoryIfPossible(size_t bytesToFree) {
 }
 
 int SkBitmapHeap::findInLookupTable(const LookupEntry& indexEntry, SkBitmapHeapEntry** entry) {
-    int index = SkTSearch<const LookupEntry>(fLookupTable.begin(),
+    int index = SkTSearch<const LookupEntry>((const LookupEntry**)fLookupTable.begin(),
                                              fLookupTable.count(),
-                                             indexEntry, sizeof(indexEntry));
+                                             &indexEntry, sizeof(void*), LookupEntry::Compare);
 
     if (index < 0) {
         // insert ourselves into the bitmapIndex
         index = ~index;
-        fLookupTable.insert(index, 1, &indexEntry);
+        *fLookupTable.insert(index) = SkNEW_ARGS(LookupEntry, (indexEntry));
     } else if (entry != NULL) {
         // populate the entry if needed
-        *entry = fStorage[fLookupTable[index].fStorageSlot];
+        *entry = fStorage[fLookupTable[index]->fStorageSlot];
     }
 
     return index;
@@ -229,19 +261,16 @@ bool SkBitmapHeap::copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBitmap) {
     return true;
 }
 
-int SkBitmapHeap::removeEntryFromLookupTable(const SkBitmapHeapEntry& entry) {
+int SkBitmapHeap::removeEntryFromLookupTable(LookupEntry* entry) {
     // remove the bitmap index for the deleted entry
     SkDEBUGCODE(int count = fLookupTable.count();)
-    // FIXME: If copying bitmaps retained the generation ID, we could
-    // just grab the generation ID from entry.fBitmap
-    LookupEntry key(entry.fBitmap, entry.fGenerationID);
-    int index = this->findInLookupTable(key, NULL);
+    int index = this->findInLookupTable(*entry, NULL);
     // Verify that findInLookupTable found an existing entry rather than adding
     // a new entry to the lookup table.
     SkASSERT(count == fLookupTable.count());
+    SkDELETE(fLookupTable[index]);
     fLookupTable.remove(index);
-    fBytesAllocated -= entry.fBytesAllocated;
+    fBytesAllocated -= fStorage[entry->fStorageSlot]->fBytesAllocated;
     return index;
 }
@@ -249,13 +278,17 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     SkBitmapHeapEntry* entry = NULL;
     int searchIndex = this->findInLookupTable(LookupEntry(originalBitmap), &entry);
 
-    // check to see if we already had a copy of the bitmap in the heap
     if (entry) {
+        // Already had a copy of the bitmap in the heap.
         if (fOwnerCount != IGNORE_OWNERS) {
             entry->addReferences(fOwnerCount);
         }
         if (fPreferredCount != UNLIMITED_SIZE) {
-            this->setMostRecentlyUsed(entry);
+            LookupEntry* lookupEntry = fLookupTable[searchIndex];
+            if (lookupEntry != fMostRecentlyUsed) {
+                this->removeFromLRU(lookupEntry);
+                this->appendToLRU(lookupEntry);
+            }
         }
         return entry->fSlot;
     }
@@ -263,10 +296,13 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     // decide if we need to evict an existing heap entry or create a new one
     if (fPreferredCount != UNLIMITED_SIZE && fStorage.count() >= fPreferredCount) {
         // iterate through our LRU cache and try to find an entry to evict
-        entry = this->findEntryToReplace(originalBitmap);
-        // we found an entry to evict
-        if (entry) {
-            int index = this->removeEntryFromLookupTable(*entry);
+        LookupEntry* lookupEntry = this->findEntryToReplace(originalBitmap);
+        if (lookupEntry != NULL) {
+            // we found an entry to evict
+            entry = fStorage[lookupEntry->fStorageSlot];
+            // Remove it from the LRU. The new entry will be added to the LRU later.
+            this->removeFromLRU(lookupEntry);
+            int index = this->removeEntryFromLookupTable(lookupEntry);
 
             // update the current search index now that we have removed one
             if (index < searchIndex) {
@@ -300,6 +336,7 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     // if the copy failed then we must abort
     if (!copySucceeded) {
         // delete the index
+        SkDELETE(fLookupTable[searchIndex]);
         fLookupTable.remove(searchIndex);
         // If entry is the last slot in storage, it is safe to delete it.
         if (fStorage.count() - 1 == entry->fSlot) {
@@ -307,14 +344,16 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
             fStorage.remove(entry->fSlot);
             fBytesAllocated -= sizeof(SkBitmapHeapEntry);
             SkDELETE(entry);
+        } else {
+            fUnusedSlots.push(entry->fSlot);
         }
         return INVALID_SLOT;
     }
 
     // update the index with the appropriate slot in the heap
-    fLookupTable[searchIndex].fStorageSlot = entry->fSlot;
+    fLookupTable[searchIndex]->fStorageSlot = entry->fSlot;
 
-    // compute the space taken by the this entry
+    // compute the space taken by this entry
     // TODO if there is a shared pixel ref don't count it
     // If the SkBitmap does not share an SkPixelRef with an SkBitmap already
     // in the SharedHeap, also include the size of its pixels.
@@ -323,13 +362,11 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     // add the bytes from this entry to the total count
     fBytesAllocated += entry->fBytesAllocated;
 
-    entry->fGenerationID = originalBitmap.getGenerationID();
-
     if (fOwnerCount != IGNORE_OWNERS) {
         entry->addReferences(fOwnerCount);
     }
     if (fPreferredCount != UNLIMITED_SIZE) {
-        this->setMostRecentlyUsed(entry);
+        this->appendToLRU(fLookupTable[searchIndex]);
    }
    return entry->fSlot;
 }
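
For reference, the freeMemoryIfPossible purge above now walks the lookup entries from least to most recently used and stops at the first slot whose owner still holds a reference. A hedged sketch of that tail-first eviction loop, with a hypothetical Slot type and freeMemory function standing in for the Skia code:

#include <cstddef>

// Hypothetical stand-ins for SkBitmapHeapEntry's refcount/byte count and the
// LookupEntry LRU link; names are illustrative, not the Skia declarations.
struct Slot {
    int    fRefCount         = 0;
    size_t fBytes            = 0;
    Slot*  fMoreRecentlyUsed = nullptr;   // next link toward the MRU end
};

// Walk from the least recently used slot toward the most recently used one,
// evicting unreferenced slots until at least bytesToFree bytes are reclaimed.
// This only mirrors the shape of SkBitmapHeap::freeMemoryIfPossible above.
size_t freeMemory(Slot* leastRecentlyUsed, size_t bytesToFree) {
    size_t freed = 0;
    Slot* iter = leastRecentlyUsed;
    while (iter != nullptr) {
        if (iter->fRefCount > 0) {
            // A referenced slot blocks eviction; everything more recently
            // used is assumed to still be referenced as well.
            break;
        }
        Slot* next = iter->fMoreRecentlyUsed;
        freed += iter->fBytes;      // stand-in for resetting the bitmap and
        iter->fBytes = 0;           // recycling the storage slot
        iter = next;
        if (freed >= bytesToFree) {
            break;
        }
    }
    return freed;
}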

View File

@@ -39,16 +39,12 @@
     int32_t fSlot;
     int32_t fRefCount;
 
-    uint32_t fGenerationID;
     SkBitmap fBitmap;
     // Keep track of the bytes allocated for this bitmap. When replacing the
     // bitmap or removing this HeapEntry we know how much memory has been
     // reclaimed.
     size_t fBytesAllocated;
-    // TODO: Generalize the LRU caching mechanism
-    SkBitmapHeapEntry* fMoreRecentlyUsed;
-    SkBitmapHeapEntry* fLessRecentlyUsed;
 
     friend class SkBitmapHeap;
 };
@@ -176,7 +172,8 @@
      * Returns a count of the number of items currently in the heap
      */
     int count() const {
-        SkASSERT(fExternalStorage != NULL || fStorage.count() == fLookupTable.count());
+        SkASSERT(fExternalStorage != NULL ||
+                 fStorage.count() - fUnusedSlots.count() == fLookupTable.count());
         return fLookupTable.count();
     }
@@ -197,43 +194,39 @@ public:
 private:
     struct LookupEntry {
-        LookupEntry(const SkBitmap& bm, uint32_t genId = 0) {
-            fGenerationId = 0 == genId ? bm.getGenerationID() : genId;
-            fPixelOffset = bm.pixelRefOffset();
-            fWidth = bm.width();
-            fHeight = bm.height();
-        }
-        uint32_t fGenerationId; // SkPixelRef GenerationID.
-        size_t fPixelOffset;
-        uint32_t fWidth;
-        uint32_t fHeight;
+        LookupEntry(const SkBitmap& bm)
+        : fGenerationId(bm.getGenerationID())
+        , fPixelOffset(bm.pixelRefOffset())
+        , fWidth(bm.width())
+        , fHeight(bm.height())
+        , fMoreRecentlyUsed(NULL)
+        , fLessRecentlyUsed(NULL){}
+
+        const uint32_t fGenerationId; // SkPixelRef GenerationID.
+        const size_t   fPixelOffset;
+        const uint32_t fWidth;
+        const uint32_t fHeight;
+
+        // TODO: Generalize the LRU caching mechanism
+        LookupEntry* fMoreRecentlyUsed;
+        LookupEntry* fLessRecentlyUsed;
 
         uint32_t fStorageSlot; // slot of corresponding bitmap in fStorage.
 
-        bool operator < (const LookupEntry& other) const {
-            if (this->fGenerationId != other.fGenerationId) {
-                return this->fGenerationId < other.fGenerationId;
-            } else if(this->fPixelOffset != other.fPixelOffset) {
-                return this->fPixelOffset < other.fPixelOffset;
-            } else if(this->fWidth != other.fWidth) {
-                return this->fWidth < other.fWidth;
-            } else {
-                return this->fHeight < other.fHeight;
-            }
-        }
-        bool operator != (const LookupEntry& other) const {
-            return this->fGenerationId != other.fGenerationId
-                || this->fPixelOffset != other.fPixelOffset
-                || this->fWidth != other.fWidth
-                || this->fHeight != other.fHeight;
-        }
+        /**
+         * Compare two LookupEntry pointers, returning -1, 0, 1 for sorting.
+         */
+        static int Compare(const LookupEntry* a, const LookupEntry* b);
     };
 
     /**
-     * Remove the entry from the lookup table.
+     * Remove the entry from the lookup table. Also deletes the entry pointed
+     * to by the table. Therefore, if a pointer to that one was passed in, the
+     * pointer should no longer be used, since the object to which it points has
+     * been deleted.
      * @return The index in the lookup table of the entry before removal.
      */
-    int removeEntryFromLookupTable(const SkBitmapHeapEntry&);
+    int removeEntryFromLookupTable(LookupEntry*);
 
     /**
      * Searches for the bitmap in the lookup table and returns the bitmaps index within the table.
@@ -245,12 +238,27 @@
      */
     int findInLookupTable(const LookupEntry& key, SkBitmapHeapEntry** entry);
 
-    SkBitmapHeapEntry* findEntryToReplace(const SkBitmap& replacement);
+    LookupEntry* findEntryToReplace(const SkBitmap& replacement);
     bool copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBitmap);
-    void setMostRecentlyUsed(SkBitmapHeapEntry* entry);
+
+    /**
+     * Remove a LookupEntry from the LRU, in preparation for either deleting or appending as most
+     * recent. Points the LookupEntry's old neighbors at each other, and sets fLeastRecentlyUsed
+     * (if there is still an entry left). Sets LookupEntry's fMoreRecentlyUsed to NULL and leaves
+     * its fLessRecentlyUsed unmodified.
+     */
+    void removeFromLRU(LookupEntry* entry);
+
+    /**
+     * Append a LookupEntry to the end of the LRU cache, marking it as the most
+     * recently used. Assumes that the LookupEntry is already in fLookupTable,
+     * but is not in the LRU cache. If it is in the cache, removeFromLRU should
+     * be called first.
+     */
+    void appendToLRU(LookupEntry*);
 
     // searchable index that maps to entries in the heap
-    SkTDArray<LookupEntry> fLookupTable;
+    SkTDArray<LookupEntry*> fLookupTable;
 
     // heap storage
     SkTDArray<SkBitmapHeapEntry*> fStorage;
@@ -259,8 +267,8 @@
     SkTDArray<int> fUnusedSlots;
     ExternalStorage* fExternalStorage;
 
-    SkBitmapHeapEntry* fMostRecentlyUsed;
-    SkBitmapHeapEntry* fLeastRecentlyUsed;
+    LookupEntry* fMostRecentlyUsed;
+    LookupEntry* fLeastRecentlyUsed;
 
     const int32_t fPreferredCount;
     const int32_t fOwnerCount;
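
The header above switches fLookupTable to an array of LookupEntry pointers kept sorted by a three-way comparator instead of operator<. A small sketch of that pattern, using hypothetical Key, Compare, and Search names; it reproduces only the convention the .cpp relies on (a non-negative result is a hit, a negative result is ~(insertion index)), not SkTSearch's actual signature:

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical key with the same fields LookupEntry compares on.
struct Key {
    uint32_t fGenerationId;
    size_t   fPixelOffset;
    uint32_t fWidth;
    uint32_t fHeight;
};

// Three-way comparison in the same field order as LookupEntry::Compare.
static int Compare(const Key* a, const Key* b) {
    if (a->fGenerationId != b->fGenerationId) {
        return a->fGenerationId < b->fGenerationId ? -1 : 1;
    }
    if (a->fPixelOffset != b->fPixelOffset) {
        return a->fPixelOffset < b->fPixelOffset ? -1 : 1;
    }
    if (a->fWidth != b->fWidth) {
        return a->fWidth < b->fWidth ? -1 : 1;
    }
    if (a->fHeight != b->fHeight) {
        return a->fHeight < b->fHeight ? -1 : 1;
    }
    return 0;
}

// Binary search over an array of pointers: a non-negative return is a hit,
// a negative return is ~(index at which the key should be inserted).
static int Search(const std::vector<Key*>& table, const Key& key) {
    int lo = 0;
    int hi = static_cast<int>(table.size()) - 1;
    while (lo <= hi) {
        int mid = lo + (hi - lo) / 2;
        int diff = Compare(table[mid], &key);
        if (diff < 0) {
            lo = mid + 1;
        } else if (diff > 0) {
            hi = mid - 1;
        } else {
            return mid;
        }
    }
    return ~lo;
}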

View File

@@ -113,7 +113,6 @@ public:
     }
 
     void addBitmap(int index) {
-        index--;
         SkBitmap* bm;
         if(fBitmaps.count() == index) {
             bm = SkNEW(SkBitmap);
@@ -125,7 +124,7 @@ public:
     }
 
     SkBitmap* getBitmap(unsigned index) {
-        return fBitmaps[index - 1];
+        return fBitmaps[index];
     }
 
     void setSharedHeap(SkBitmapHeap* heap) {

View File

@@ -163,6 +163,15 @@ public:
         if (this->needOpBytes()) {
             this->writeOp(kDone_DrawOp);
             this->doNotify();
+            if (shouldFlattenBitmaps(fFlags)) {
+                // In this case, a BitmapShuttle is reffed by the SharedHeap
+                // and refs this canvas. Unref the SharedHeap to end the
+                // circular reference. When shouldFlattenBitmaps is false,
+                // there is no circular reference, so the SharedHeap can be
+                // safely unreffed in the destructor.
+                fSharedHeap->unref();
+                fSharedHeap = NULL;
+            }
         }
         fDone = true;
     }
@@ -172,11 +181,6 @@ public:
     size_t freeMemoryIfPossible(size_t bytesToFree);
 
     size_t storageAllocatedForRecording() {
-        // FIXME: This can be removed once fSharedHeap is used by cross process
-        // case.
-        if (NULL == fSharedHeap) {
-            return 0;
-        }
         return fSharedHeap->bytesAllocated();
     }
@@ -231,6 +235,11 @@ public:
                                 const SkPaint&) SK_OVERRIDE;
     virtual void drawData(const void*, size_t) SK_OVERRIDE;
 
+    /**
+     * Flatten an SkBitmap to send to the reader, where it will be referenced
+     * according to slot.
+     */
+    bool shuttleBitmap(const SkBitmap&, int32_t slot);
 private:
     enum {
         kNoSaveLayer = -1,
@@ -243,7 +252,7 @@ private:
     size_t fBlockSize; // amount allocated for writer
     size_t fBytesNotified;
     bool fDone;
-    uint32_t fFlags;
+    const uint32_t fFlags;
 
     SkRefCntSet fTypefaceSet;
@@ -273,27 +282,15 @@ private:
     // if a new SkFlatData was added when in cross process mode
     void flattenFactoryNames();
 
-    // These are only used when in cross process, but with no shared address
-    // space, so bitmaps are flattened.
-    FlattenableHeap fBitmapHeap;
-    SkBitmapDictionary fBitmapDictionary;
-    int flattenToIndex(const SkBitmap&);
-
     FlattenableHeap fFlattenableHeap;
     FlatDictionary fFlatDictionary;
     int fCurrFlatIndex[kCount_PaintFlats];
     int flattenToIndex(SkFlattenable* obj, PaintFlats);
 
-    // Common code used by drawBitmap* when flattening.
-    bool commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op, unsigned flags,
-                                 size_t opBytesNeeded, const SkPaint* paint);
-    // Common code used by drawBitmap* when storing in the heap.
-    bool commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op, unsigned flags,
-                              size_t opBytesNeeded, const SkPaint* paint);
-    // Convenience type for function pointer
-    typedef bool (SkGPipeCanvas::*BitmapCommonFunction)(const SkBitmap&,
-                                                        DrawOps, unsigned,
-                                                        size_t, const SkPaint*);
+    // Common code used by drawBitmap*. Behaves differently depending on the
+    // type of SkBitmapHeap being used, which is determined by the flags used.
+    bool commonDrawBitmap(const SkBitmap& bm, DrawOps op, unsigned flags,
+                          size_t opBytesNeeded, const SkPaint* paint);
 
     SkPaint fPaint;
     void writePaint(const SkPaint&);
@@ -321,23 +318,20 @@ void SkGPipeCanvas::flattenFactoryNames() {
     }
 }
 
-int SkGPipeCanvas::flattenToIndex(const SkBitmap & bitmap) {
+bool SkGPipeCanvas::shuttleBitmap(const SkBitmap& bm, int32_t slot) {
     SkASSERT(shouldFlattenBitmaps(fFlags));
-    uint32_t flags = SkFlattenableWriteBuffer::kCrossProcess_Flag;
-    bool added, replaced;
-    const SkFlatData* flat = fBitmapDictionary.findAndReplace(
-            bitmap, flags, fBitmapHeap.flatToReplace(), &added, &replaced);
-
-    int index = flat->index();
-    if (added) {
-        this->flattenFactoryNames();
-        size_t flatSize = flat->flatSize();
-        if (this->needOpBytes(flatSize)) {
-            this->writeOp(kDef_Bitmap_DrawOp, 0, index);
-            fWriter.write(flat->data(), flatSize);
-        }
+    SkOrderedWriteBuffer buffer(1024);
+    buffer.setNamedFactoryRecorder(fFactorySet);
+    bm.flatten(buffer);
+    this->flattenFactoryNames();
+    uint32_t size = buffer.size();
+    if (this->needOpBytes(size)) {
+        this->writeOp(kDef_Bitmap_DrawOp, 0, slot);
+        void* dst = static_cast<void*>(fWriter.reserve(size));
+        buffer.writeToMemory(dst);
+        return true;
     }
-    return index;
+    return false;
 }
 
 // return 0 for NULL (or unflattenable obj), or index-base-1
@@ -377,6 +371,24 @@ int SkGPipeCanvas::flattenToIndex(SkFlattenable* obj, PaintFlats paintflat) {
 
 ///////////////////////////////////////////////////////////////////////////////
 
+/**
+ * If SkBitmaps are to be flattened to send to the reader, this class is
+ * provided to the SkBitmapHeap to tell the SkGPipeCanvas to do so.
+ */
+class BitmapShuttle : public SkBitmapHeap::ExternalStorage {
+public:
+    BitmapShuttle(SkGPipeCanvas*);
+
+    ~BitmapShuttle();
+
+    virtual bool insert(const SkBitmap& bitmap, int32_t slot) SK_OVERRIDE;
+
+private:
+    SkGPipeCanvas*    fCanvas;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
 #define MIN_BLOCK_SIZE (16 * 1024)
 #define BITMAPS_TO_KEEP 5
 #define FLATTENABLES_TO_KEEP 10
@@ -386,8 +398,6 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
     : fFactorySet(isCrossProcess(flags) ? SkNEW(SkNamedFactorySet) : NULL)
     , fWriter(*writer)
     , fFlags(flags)
-    , fBitmapHeap(BITMAPS_TO_KEEP, fFactorySet)
-    , fBitmapDictionary(&fBitmapHeap)
     , fFlattenableHeap(FLATTENABLES_TO_KEEP, fFactorySet)
     , fFlatDictionary(&fFlattenableHeap) {
     fController = controller;
@@ -411,10 +421,12 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
     }
 
     if (shouldFlattenBitmaps(flags)) {
-        // TODO: Use the shared heap for cross process case as well.
-        fSharedHeap = NULL;
+        BitmapShuttle* shuttle = SkNEW_ARGS(BitmapShuttle, (this));
+        fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (shuttle, BITMAPS_TO_KEEP));
+        shuttle->unref();
     } else {
-        fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (5, controller->numberOfReaders()));
+        fSharedHeap = SkNEW_ARGS(SkBitmapHeap,
+                                 (BITMAPS_TO_KEEP, controller->numberOfReaders()));
         if (this->needOpBytes(sizeof(void*))) {
             this->writeOp(kShareHeap_DrawOp);
             fWriter.writePtr(static_cast<void*>(fSharedHeap));
@@ -426,8 +438,6 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
 SkGPipeCanvas::~SkGPipeCanvas() {
     this->finish();
     SkSafeUnref(fFactorySet);
-    // FIXME: This can be changed to unref() once fSharedHeap is used by cross
-    // process case.
     SkSafeUnref(fSharedHeap);
 }
@@ -682,26 +692,10 @@ void SkGPipeCanvas::drawPath(const SkPath& path, const SkPaint& paint) {
     }
 }
 
-bool SkGPipeCanvas::commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op,
-                                            unsigned flags,
-                                            size_t opBytesNeeded,
-                                            const SkPaint* paint) {
-    if (paint != NULL) {
-        flags |= kDrawBitmap_HasPaint_DrawOpsFlag;
-        this->writePaint(*paint);
-    }
-    int bitmapIndex = this->flattenToIndex(bm);
-    if (this->needOpBytes(opBytesNeeded)) {
-        this->writeOp(op, flags, bitmapIndex);
-        return true;
-    }
-    return false;
-}
-
-bool SkGPipeCanvas::commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op,
-                                         unsigned flags,
-                                         size_t opBytesNeeded,
-                                         const SkPaint* paint) {
+bool SkGPipeCanvas::commonDrawBitmap(const SkBitmap& bm, DrawOps op,
+                                     unsigned flags,
+                                     size_t opBytesNeeded,
+                                     const SkPaint* paint) {
     int32_t bitmapIndex = fSharedHeap->insert(bm);
     if (SkBitmapHeap::INVALID_SLOT == bitmapIndex) {
         return false;
@@ -722,11 +716,7 @@ void SkGPipeCanvas::drawBitmap(const SkBitmap& bm, SkScalar left, SkScalar top,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = sizeof(SkScalar) * 2;
 
-    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
-            &SkGPipeCanvas::commonDrawBitmapFlatten :
-            &SkGPipeCanvas::commonDrawBitmapHeap;
-
-    if ((*this.*bitmapCommon)(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
+    if (this->commonDrawBitmap(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.writeScalar(left);
         fWriter.writeScalar(top);
     }
@@ -745,11 +735,7 @@ void SkGPipeCanvas::drawBitmapRect(const SkBitmap& bm, const SkIRect* src,
         flags = 0;
     }
 
-    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
-            &SkGPipeCanvas::commonDrawBitmapFlatten :
-            &SkGPipeCanvas::commonDrawBitmapHeap;
-
-    if ((*this.*bitmapCommon)(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
+    if (this->commonDrawBitmap(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
         if (hasSrc) {
             fWriter.write32(src->fLeft);
             fWriter.write32(src->fTop);
@@ -765,11 +751,7 @@ void SkGPipeCanvas::drawBitmapMatrix(const SkBitmap& bm, const SkMatrix& matrix,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = matrix.writeToMemory(NULL);
 
-    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
-            &SkGPipeCanvas::commonDrawBitmapFlatten :
-            &SkGPipeCanvas::commonDrawBitmapHeap;
-
-    if ((*this.*bitmapCommon)(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
+    if (this->commonDrawBitmap(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.writeMatrix(matrix);
     }
 }
@@ -779,11 +761,7 @@ void SkGPipeCanvas::drawBitmapNine(const SkBitmap& bm, const SkIRect& center,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = sizeof(int32_t) * 4 + sizeof(SkRect);
 
-    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
-            &SkGPipeCanvas::commonDrawBitmapFlatten :
-            &SkGPipeCanvas::commonDrawBitmapHeap;
-
-    if ((*this.*bitmapCommon)(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
+    if (this->commonDrawBitmap(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.write32(center.fLeft);
         fWriter.write32(center.fTop);
         fWriter.write32(center.fRight);
@@ -797,11 +775,7 @@ void SkGPipeCanvas::drawSprite(const SkBitmap& bm, int left, int top,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = sizeof(int32_t) * 2;
 
-    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
-            &SkGPipeCanvas::commonDrawBitmapFlatten :
-            &SkGPipeCanvas::commonDrawBitmapHeap;
-
-    if ((*this.*bitmapCommon)(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
+    if (this->commonDrawBitmap(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.write32(left);
         fWriter.write32(top);
     }
@@ -960,11 +934,6 @@ void SkGPipeCanvas::flushRecording(bool detachCurrentBlock) {
 }
 
 size_t SkGPipeCanvas::freeMemoryIfPossible(size_t bytesToFree) {
-    // FIXME: This can be removed once fSharedHeap is used by cross process
-    // case.
-    if (NULL == fSharedHeap) {
-        return 0;
-    }
     return fSharedHeap->freeMemoryIfPossible(bytesToFree);
 }
@@ -1145,3 +1114,18 @@ size_t SkGPipeWriter::storageAllocatedForRecording() const {
     return NULL == fCanvas ? 0 : fCanvas->storageAllocatedForRecording();
 }
 
+///////////////////////////////////////////////////////////////////////////////
+
+BitmapShuttle::BitmapShuttle(SkGPipeCanvas* canvas) {
+    SkASSERT(canvas != NULL);
+    fCanvas = canvas;
+    fCanvas->ref();
+}
+
+BitmapShuttle::~BitmapShuttle() {
+    fCanvas->unref();
+}
+
+bool BitmapShuttle::insert(const SkBitmap& bitmap, int32_t slot) {
+    return fCanvas->shuttleBitmap(bitmap, slot);
+}
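
The BitmapShuttle above is what lets the cross-process (flattening) case reuse the shared SkBitmapHeap: the heap hands each newly inserted bitmap to its ExternalStorage, which calls back into the canvas to serialize it under kDef_Bitmap_DrawOp. A minimal sketch of that callback shape, with hypothetical Canvas, ExternalStorage, and Shuttle types standing in for the Skia classes; the real shuttle also refs the canvas, and finish() unrefs the heap to break the resulting cycle:

#include <cstdint>

class Canvas;   // hypothetical stand-in for SkGPipeCanvas

// Stand-in for SkBitmapHeap::ExternalStorage: instead of keeping a copy of
// each inserted bitmap, the heap hands it (here reduced to its slot number)
// to external storage.
class ExternalStorage {
public:
    virtual ~ExternalStorage() {}
    virtual bool insert(int32_t slot) = 0;
};

// Stand-in for BitmapShuttle: forwards the heap's insert back to the canvas,
// which flattens the bitmap into the pipe, keyed by its heap slot.
class Shuttle : public ExternalStorage {
public:
    explicit Shuttle(Canvas* canvas) : fCanvas(canvas) {}
    bool insert(int32_t slot) override;
private:
    Canvas* fCanvas;
};

class Canvas {
public:
    bool shuttleBitmap(int32_t slot) {
        // The real code serializes the SkBitmap with a write buffer and emits
        // kDef_Bitmap_DrawOp tagged with the heap slot; this stub only checks
        // that the slot is valid.
        return slot >= 0;
    }
};

bool Shuttle::insert(int32_t slot) {
    return fCanvas->shuttleBitmap(slot);
}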