Revert r5063 until unit tests can be fixed.
git-svn-id: http://skia.googlecode.com/svn/trunk@5067 2bbb7eff-a529-9590-31e7-b0007b416f81
parent 3319f33470
commit fdb9b212a8
@@ -15,7 +15,9 @@
 SkBitmapHeapEntry::SkBitmapHeapEntry()
     : fSlot(-1)
     , fRefCount(0)
-    , fBytesAllocated(0) {
+    , fBytesAllocated(0)
+    , fMoreRecentlyUsed(NULL)
+    , fLessRecentlyUsed(NULL) {
 }
 
 SkBitmapHeapEntry::~SkBitmapHeapEntry() {
@@ -35,30 +37,6 @@ void SkBitmapHeapEntry::addReferences(int count) {
 
 ///////////////////////////////////////////////////////////////////////////////
 
-int SkBitmapHeap::LookupEntry::Compare(const SkBitmapHeap::LookupEntry *a,
-                                       const SkBitmapHeap::LookupEntry *b) {
-    if (a->fGenerationId < b->fGenerationId) {
-        return -1;
-    } else if (a->fGenerationId > b->fGenerationId) {
-        return 1;
-    } else if (a->fPixelOffset < b->fPixelOffset) {
-        return -1;
-    } else if (a->fPixelOffset > b->fPixelOffset) {
-        return 1;
-    } else if (a->fWidth < b->fWidth) {
-        return -1;
-    } else if (a->fWidth > b->fWidth) {
-        return 1;
-    } else if (a->fHeight < b->fHeight) {
-        return -1;
-    } else if (a->fHeight > b->fHeight) {
-        return 1;
-    }
-    return 0;
-}
-
-///////////////////////////////////////////////////////////////////////////////
-
 SkBitmapHeap::SkBitmapHeap(int32_t preferredSize, int32_t ownerCount)
     : INHERITED()
     , fExternalStorage(NULL)
@@ -114,35 +92,27 @@ SkTRefArray<SkBitmap>* SkBitmapHeap::extractBitmaps() const {
     return array;
 }
 
-void SkBitmapHeap::removeFromLRU(SkBitmapHeap::LookupEntry* entry) {
-    if (fMostRecentlyUsed == entry) {
-        fMostRecentlyUsed = entry->fLessRecentlyUsed;
-        if (NULL == fMostRecentlyUsed) {
-            SkASSERT(fLeastRecentlyUsed == entry);
-            fLeastRecentlyUsed = NULL;
-        } else {
-            fMostRecentlyUsed->fMoreRecentlyUsed = NULL;
-        }
-    } else {
-        // Remove entry from its prior place, and make sure to cover the hole.
-        if (fLeastRecentlyUsed == entry) {
-            SkASSERT(entry->fMoreRecentlyUsed != NULL);
-            fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
-        }
-        // Since we have already considered the case where entry is the most recently used, it must
-        // have a more recently used at this point.
+// We just "used" the entry. Update our LRU accordingly
+void SkBitmapHeap::setMostRecentlyUsed(SkBitmapHeapEntry* entry) {
+    SkASSERT(entry != NULL);
+    if (entry == fMostRecentlyUsed) {
+        return;
+    }
+    // Remove info from its prior place, and make sure to cover the hole.
+    if (fLeastRecentlyUsed == entry) {
         SkASSERT(entry->fMoreRecentlyUsed != NULL);
+        fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
+    }
+    if (entry->fMoreRecentlyUsed != NULL) {
+        SkASSERT(fMostRecentlyUsed != entry);
         entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;
-
+    }
     if (entry->fLessRecentlyUsed != NULL) {
         SkASSERT(fLeastRecentlyUsed != entry);
         entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
-        }
     }
     entry->fMoreRecentlyUsed = NULL;
-}
-
-void SkBitmapHeap::appendToLRU(SkBitmapHeap::LookupEntry* entry) {
+    // Set up the head and tail pointers properly.
     if (fMostRecentlyUsed != NULL) {
         SkASSERT(NULL == fMostRecentlyUsed->fMoreRecentlyUsed);
         fMostRecentlyUsed->fMoreRecentlyUsed = entry;
@@ -155,20 +125,19 @@ void SkBitmapHeap::appendToLRU(SkBitmapHeap::LookupEntry* entry) {
 }
 
 // iterate through our LRU cache and try to find an entry to evict
-SkBitmapHeap::LookupEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
+SkBitmapHeapEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
     SkASSERT(fPreferredCount != UNLIMITED_SIZE);
     SkASSERT(fStorage.count() >= fPreferredCount);
 
-    SkBitmapHeap::LookupEntry* iter = fLeastRecentlyUsed;
+    SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
     while (iter != NULL) {
-        SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
-        if (heapEntry->fRefCount > 0) {
+        if (iter->fRefCount > 0) {
             // If the least recently used bitmap has not been unreferenced
             // by its owner, then according to our LRU specifications a more
             // recently used one can not have used all it's references yet either.
             return NULL;
         }
-        if (replacement.getGenerationID() == iter->fGenerationId) {
+        if (replacement.pixelRef() && replacement.pixelRef() == iter->fBitmap.pixelRef()) {
            // Do not replace a bitmap with a new one using the same
            // pixel ref. Instead look for a different one that will
            // potentially free up more space.
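For reference, the restored setMostRecentlyUsed/findEntryToReplace path keeps entries on an intrusive doubly linked LRU list threaded through the fMoreRecentlyUsed/fLessRecentlyUsed pointers. Below is a minimal standalone sketch of that "unlink, then re-link at the most-recently-used end" operation, with invented names and no Skia types; it is an illustration only, not the code in this commit.

#include <cassert>
#include <cstdio>

// Illustrative node: the two pointers play the role of
// fMoreRecentlyUsed / fLessRecentlyUsed in SkBitmapHeapEntry.
struct LruNode {
    int      id;
    LruNode* moreRecent;   // toward the MRU end
    LruNode* lessRecent;   // toward the LRU end
    explicit LruNode(int i) : id(i), moreRecent(nullptr), lessRecent(nullptr) {}
};

struct LruList {
    LruNode* mostRecent = nullptr;   // head (most recently used)
    LruNode* leastRecent = nullptr;  // tail (least recently used)

    // Mark a node as just used: detach it from its current position
    // and re-link it at the MRU end, fixing head/tail pointers.
    void touch(LruNode* node) {
        if (node == mostRecent) {
            return;                          // already the most recently used
        }
        // Unlink from the current position and cover the hole.
        if (leastRecent == node) {
            leastRecent = node->moreRecent;
        }
        if (node->moreRecent) {
            node->moreRecent->lessRecent = node->lessRecent;
        }
        if (node->lessRecent) {
            node->lessRecent->moreRecent = node->moreRecent;
        }
        node->moreRecent = nullptr;
        // Re-link at the MRU end.
        if (mostRecent) {
            assert(mostRecent->moreRecent == nullptr);
            mostRecent->moreRecent = node;
            node->lessRecent = mostRecent;
        }
        mostRecent = node;
        if (node->lessRecent == nullptr) {
            leastRecent = node;              // list was empty before
        }
    }
};

int main() {
    LruNode a(1), b(2), c(3);
    LruList list;
    list.touch(&a);
    list.touch(&b);
    list.touch(&c);
    list.touch(&a);  // a becomes most recent again
    for (LruNode* n = list.leastRecent; n; n = n->moreRecent) {
        std::printf("%d ", n->id);           // prints: 2 3 1
    }
    std::printf("\n");
    return 0;
}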
@@ -184,22 +153,21 @@ size_t SkBitmapHeap::freeMemoryIfPossible(size_t bytesToFree) {
     if (UNLIMITED_SIZE == fPreferredCount) {
         return 0;
     }
-    LookupEntry* iter = fLeastRecentlyUsed;
+    SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
     size_t origBytesAllocated = fBytesAllocated;
     // Purge starting from LRU until a non-evictable bitmap is found or until
     // everything is evicted.
-    while (iter != NULL) {
-        SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
-        if (heapEntry->fRefCount > 0) {
-            break;
-        }
-        LookupEntry* next = iter->fMoreRecentlyUsed;
-        this->removeEntryFromLookupTable(iter);
+    while (iter && 0 == iter->fRefCount) {
+        SkBitmapHeapEntry* next = iter->fMoreRecentlyUsed;
+        this->removeEntryFromLookupTable(*iter);
         // Free the pixel memory. removeEntryFromLookupTable already reduced
         // fBytesAllocated properly.
-        heapEntry->fBitmap.reset();
+        iter->fBitmap.reset();
         // Add to list of unused slots which can be reused in the future.
-        fUnusedSlots.push(heapEntry->fSlot);
+        fUnusedSlots.push(iter->fSlot);
+        // Remove its LRU pointers, so that it does not pretend it is already in
+        // the list the next time it is used.
+        iter->fMoreRecentlyUsed = iter->fLessRecentlyUsed = NULL;
         iter = next;
         if (origBytesAllocated - fBytesAllocated >= bytesToFree) {
             break;
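The freeMemoryIfPossible hunk above purges from the least recently used end, stopping at the first still-referenced entry or once enough bytes have been reclaimed. A compact sketch of that eviction policy under invented types (not the Skia API):

#include <cstddef>
#include <vector>

// Illustrative entry: refCount > 0 means some owner still uses the bitmap,
// so it and everything more recently used must be kept.
struct CacheEntry {
    int    refCount;
    size_t bytes;
    bool   evicted;
};

// Walk entries from least to most recently used, evicting unreferenced
// ones until at least bytesToFree bytes have been reclaimed.
// Returns the number of bytes actually freed.
size_t freeMemoryIfPossible(std::vector<CacheEntry*>& lruToMru, size_t bytesToFree) {
    size_t freed = 0;
    for (CacheEntry* entry : lruToMru) {
        if (entry->refCount > 0) {
            break;              // a referenced entry ends the purge
        }
        entry->evicted = true;  // stands in for resetting the bitmap and
        freed += entry->bytes;  // recycling its storage slot
        if (freed >= bytesToFree) {
            break;
        }
    }
    return freed;
}

int main() {
    CacheEntry a{0, 100, false}, b{0, 200, false}, c{1, 400, false};
    std::vector<CacheEntry*> lru = {&a, &b, &c};   // least recent first
    size_t freed = freeMemoryIfPossible(lru, 150); // frees a and b: 300 bytes
    return freed == 300 ? 0 : 1;
}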
@@ -225,17 +193,17 @@ size_t SkBitmapHeap::freeMemoryIfPossible(size_t bytesToFree) {
 }
 
 int SkBitmapHeap::findInLookupTable(const LookupEntry& indexEntry, SkBitmapHeapEntry** entry) {
-    int index = SkTSearch<const LookupEntry>((const LookupEntry**)fLookupTable.begin(),
+    int index = SkTSearch<const LookupEntry>(fLookupTable.begin(),
                                              fLookupTable.count(),
-                                             &indexEntry, sizeof(void*), LookupEntry::Compare);
+                                             indexEntry, sizeof(indexEntry));
 
     if (index < 0) {
         // insert ourselves into the bitmapIndex
         index = ~index;
-        *fLookupTable.insert(index) = SkNEW_ARGS(LookupEntry, (indexEntry));
+        fLookupTable.insert(index, 1, &indexEntry);
     } else if (entry != NULL) {
         // populate the entry if needed
-        *entry = fStorage[fLookupTable[index]->fStorageSlot];
+        *entry = fStorage[fLookupTable[index].fStorageSlot];
     }
 
     return index;
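findInLookupTable depends on the usual search convention in which a miss returns the bitwise complement of the insertion point, so the caller can recover it with index = ~index before inserting. A generic sketch of that contract follows; it is not the SkTSearch implementation, just the same convention with invented names.

#include <cassert>
#include <vector>

// Binary search over a sorted vector. Returns the index of key if present;
// otherwise returns ~insertionIndex (always negative), so the caller can
// recover the insertion point with index = ~index.
int searchOrInsertionPoint(const std::vector<int>& sorted, int key) {
    int lo = 0;
    int hi = static_cast<int>(sorted.size()) - 1;
    while (lo <= hi) {
        int mid = lo + (hi - lo) / 2;
        if (sorted[mid] < key) {
            lo = mid + 1;
        } else if (sorted[mid] > key) {
            hi = mid - 1;
        } else {
            return mid;                // found
        }
    }
    return ~lo;                        // not found: encode insertion point
}

int main() {
    std::vector<int> table = {10, 20, 40};

    int index = searchOrInsertionPoint(table, 30);
    if (index < 0) {
        index = ~index;                          // 2: where 30 belongs
        table.insert(table.begin() + index, 30); // mirrors fLookupTable.insert(index, ...)
    }
    assert(table[2] == 30);

    assert(searchOrInsertionPoint(table, 20) == 1);  // existing key: plain index
    return 0;
}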
@@ -261,16 +229,19 @@ bool SkBitmapHeap::copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBi
     return true;
 }
 
-int SkBitmapHeap::removeEntryFromLookupTable(LookupEntry* entry) {
+int SkBitmapHeap::removeEntryFromLookupTable(const SkBitmapHeapEntry& entry) {
     // remove the bitmap index for the deleted entry
     SkDEBUGCODE(int count = fLookupTable.count();)
-    int index = this->findInLookupTable(*entry, NULL);
+    // FIXME: If copying bitmaps retained the generation ID, we could
+    // just grab the generation ID from entry.fBitmap
+    LookupEntry key(entry.fBitmap, entry.fGenerationID);
+    int index = this->findInLookupTable(key, NULL);
     // Verify that findInLookupTable found an existing entry rather than adding
     // a new entry to the lookup table.
     SkASSERT(count == fLookupTable.count());
-    SkDELETE(fLookupTable[index]);
     fLookupTable.remove(index);
-    fBytesAllocated -= fStorage[entry->fStorageSlot]->fBytesAllocated;
+    fBytesAllocated -= entry.fBytesAllocated;
     return index;
 }
 
@@ -278,17 +249,13 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     SkBitmapHeapEntry* entry = NULL;
     int searchIndex = this->findInLookupTable(LookupEntry(originalBitmap), &entry);
 
+    // check to see if we already had a copy of the bitmap in the heap
     if (entry) {
-        // Already had a copy of the bitmap in the heap.
         if (fOwnerCount != IGNORE_OWNERS) {
             entry->addReferences(fOwnerCount);
         }
         if (fPreferredCount != UNLIMITED_SIZE) {
-            LookupEntry* lookupEntry = fLookupTable[searchIndex];
-            if (lookupEntry != fMostRecentlyUsed) {
-                this->removeFromLRU(lookupEntry);
-                this->appendToLRU(lookupEntry);
-            }
+            this->setMostRecentlyUsed(entry);
         }
         return entry->fSlot;
     }
@@ -296,13 +263,10 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     // decide if we need to evict an existing heap entry or create a new one
     if (fPreferredCount != UNLIMITED_SIZE && fStorage.count() >= fPreferredCount) {
         // iterate through our LRU cache and try to find an entry to evict
-        LookupEntry* lookupEntry = this->findEntryToReplace(originalBitmap);
-        if (lookupEntry != NULL) {
-            // we found an entry to evict
-            entry = fStorage[lookupEntry->fStorageSlot];
-            // Remove it from the LRU. The new entry will be added to the LRU later.
-            this->removeFromLRU(lookupEntry);
-            int index = this->removeEntryFromLookupTable(lookupEntry);
+        entry = this->findEntryToReplace(originalBitmap);
+        // we found an entry to evict
+        if (entry) {
+            int index = this->removeEntryFromLookupTable(*entry);
 
             // update the current search index now that we have removed one
             if (index < searchIndex) {
@@ -336,7 +300,6 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     // if the copy failed then we must abort
     if (!copySucceeded) {
         // delete the index
-        SkDELETE(fLookupTable[searchIndex]);
         fLookupTable.remove(searchIndex);
         // If entry is the last slot in storage, it is safe to delete it.
         if (fStorage.count() - 1 == entry->fSlot) {
@@ -344,16 +307,14 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
             fStorage.remove(entry->fSlot);
             fBytesAllocated -= sizeof(SkBitmapHeapEntry);
             SkDELETE(entry);
-        } else {
-            fUnusedSlots.push(entry->fSlot);
         }
         return INVALID_SLOT;
     }
 
     // update the index with the appropriate slot in the heap
-    fLookupTable[searchIndex]->fStorageSlot = entry->fSlot;
+    fLookupTable[searchIndex].fStorageSlot = entry->fSlot;
 
-    // compute the space taken by this entry
+    // compute the space taken by the this entry
     // TODO if there is a shared pixel ref don't count it
     // If the SkBitmap does not share an SkPixelRef with an SkBitmap already
     // in the SharedHeap, also include the size of its pixels.
@@ -362,11 +323,13 @@ int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
     // add the bytes from this entry to the total count
     fBytesAllocated += entry->fBytesAllocated;
 
+    entry->fGenerationID = originalBitmap.getGenerationID();
+
     if (fOwnerCount != IGNORE_OWNERS) {
         entry->addReferences(fOwnerCount);
     }
     if (fPreferredCount != UNLIMITED_SIZE) {
-        this->appendToLRU(fLookupTable[searchIndex]);
+        this->setMostRecentlyUsed(entry);
     }
     return entry->fSlot;
 }
@@ -39,12 +39,16 @@ private:
 
     int32_t fSlot;
     int32_t fRefCount;
+    uint32_t fGenerationID;
 
     SkBitmap fBitmap;
     // Keep track of the bytes allocated for this bitmap. When replacing the
     // bitmap or removing this HeapEntry we know how much memory has been
     // reclaimed.
     size_t fBytesAllocated;
+    // TODO: Generalize the LRU caching mechanism
+    SkBitmapHeapEntry* fMoreRecentlyUsed;
+    SkBitmapHeapEntry* fLessRecentlyUsed;
 
     friend class SkBitmapHeap;
 };
@@ -172,8 +176,7 @@ public:
      * Returns a count of the number of items currently in the heap
      */
     int count() const {
-        SkASSERT(fExternalStorage != NULL ||
-                 fStorage.count() - fUnusedSlots.count() == fLookupTable.count());
+        SkASSERT(fExternalStorage != NULL || fStorage.count() == fLookupTable.count());
         return fLookupTable.count();
     }
 
@@ -194,39 +197,43 @@ public:
 
 private:
     struct LookupEntry {
-        LookupEntry(const SkBitmap& bm)
-        : fGenerationId(bm.getGenerationID())
-        , fPixelOffset(bm.pixelRefOffset())
-        , fWidth(bm.width())
-        , fHeight(bm.height())
-        , fMoreRecentlyUsed(NULL)
-        , fLessRecentlyUsed(NULL){}
-        const uint32_t fGenerationId; // SkPixelRef GenerationID.
-        const size_t fPixelOffset;
-        const uint32_t fWidth;
-        const uint32_t fHeight;
+        LookupEntry(const SkBitmap& bm, uint32_t genId = 0) {
+            fGenerationId = 0 == genId ? bm.getGenerationID() : genId;
+            fPixelOffset = bm.pixelRefOffset();
+            fWidth = bm.width();
+            fHeight = bm.height();
+        }
+        uint32_t fGenerationId; // SkPixelRef GenerationID.
+        size_t fPixelOffset;
+        uint32_t fWidth;
+        uint32_t fHeight;
 
-        // TODO: Generalize the LRU caching mechanism
-        LookupEntry* fMoreRecentlyUsed;
-        LookupEntry* fLessRecentlyUsed;
 
         uint32_t fStorageSlot; // slot of corresponding bitmap in fStorage.
 
-        /**
-         * Compare two LookupEntry pointers, returning -1, 0, 1 for sorting.
-         */
-        static int Compare(const LookupEntry* a, const LookupEntry* b);
+        bool operator < (const LookupEntry& other) const {
+            if (this->fGenerationId != other.fGenerationId) {
+                return this->fGenerationId < other.fGenerationId;
+            } else if(this->fPixelOffset != other.fPixelOffset) {
+                return this->fPixelOffset < other.fPixelOffset;
+            } else if(this->fWidth != other.fWidth) {
+                return this->fWidth < other.fWidth;
+            } else {
+                return this->fHeight < other.fHeight;
+            }
+        }
+        bool operator != (const LookupEntry& other) const {
+            return this->fGenerationId != other.fGenerationId
+                || this->fPixelOffset != other.fPixelOffset
+                || this->fWidth != other.fWidth
+                || this->fHeight != other.fHeight;
+        }
     };
 
     /**
-     * Remove the entry from the lookup table. Also deletes the entry pointed
-     * to by the table. Therefore, if a pointer to that one was passed in, the
-     * pointer should no longer be used, since the object to which it points has
-     * been deleted.
+     * Remove the entry from the lookup table.
      * @return The index in the lookup table of the entry before removal.
      */
-    int removeEntryFromLookupTable(LookupEntry*);
+    int removeEntryFromLookupTable(const SkBitmapHeapEntry&);
 
     /**
      * Searches for the bitmap in the lookup table and returns the bitmaps index within the table.
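The restored LookupEntry::operator< orders keys by generation ID, then pixel offset, then width, then height. The same strict weak ordering can be written compactly with std::tie; the key type below is an invented stand-in for illustration only, not the struct in this diff.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <tuple>

// Invented stand-in for the lookup key: generation ID, pixel offset, width, height.
struct BitmapKey {
    uint32_t generationId;
    size_t   pixelOffset;
    uint32_t width;
    uint32_t height;
};

// Lexicographic comparison, field by field, equivalent to the hand-written
// if/else chain in operator< above.
bool operator<(const BitmapKey& a, const BitmapKey& b) {
    return std::tie(a.generationId, a.pixelOffset, a.width, a.height)
         < std::tie(b.generationId, b.pixelOffset, b.width, b.height);
}

bool operator!=(const BitmapKey& a, const BitmapKey& b) {
    return a.generationId != b.generationId
        || a.pixelOffset  != b.pixelOffset
        || a.width        != b.width
        || a.height       != b.height;
}

int main() {
    BitmapKey a = {5, 0, 100, 100};
    BitmapKey b = {5, 0, 100, 200};
    assert(a < b);        // ties broken by the later fields
    assert(!(b < a));
    assert(a != b);
    return 0;
}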
@@ -238,27 +245,12 @@ private:
      */
     int findInLookupTable(const LookupEntry& key, SkBitmapHeapEntry** entry);
 
-    LookupEntry* findEntryToReplace(const SkBitmap& replacement);
+    SkBitmapHeapEntry* findEntryToReplace(const SkBitmap& replacement);
     bool copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBitmap);
-
-    /**
-     * Remove a LookupEntry from the LRU, in preparation for either deleting or appending as most
-     * recent. Points the LookupEntry's old neighbors at each other, and sets fLeastRecentlyUsed
-     * (if there is still an entry left). Sets LookupEntry's fMoreRecentlyUsed to NULL and leaves
-     * its fLessRecentlyUsed unmodified.
-     */
-    void removeFromLRU(LookupEntry* entry);
-
-    /**
-     * Append a LookupEntry to the end of the LRU cache, marking it as the most
-     * recently used. Assumes that the LookupEntry is already in fLookupTable,
-     * but is not in the LRU cache. If it is in the cache, removeFromLRU should
-     * be called first.
-     */
-    void appendToLRU(LookupEntry*);
+    void setMostRecentlyUsed(SkBitmapHeapEntry* entry);
 
     // searchable index that maps to entries in the heap
-    SkTDArray<LookupEntry*> fLookupTable;
+    SkTDArray<LookupEntry> fLookupTable;
 
     // heap storage
     SkTDArray<SkBitmapHeapEntry*> fStorage;
@@ -267,8 +259,8 @@ private:
     SkTDArray<int> fUnusedSlots;
     ExternalStorage* fExternalStorage;
 
-    LookupEntry* fMostRecentlyUsed;
-    LookupEntry* fLeastRecentlyUsed;
+    SkBitmapHeapEntry* fMostRecentlyUsed;
+    SkBitmapHeapEntry* fLeastRecentlyUsed;
 
     const int32_t fPreferredCount;
     const int32_t fOwnerCount;
@@ -113,6 +113,7 @@ public:
     }
 
     void addBitmap(int index) {
+        index--;
         SkBitmap* bm;
         if(fBitmaps.count() == index) {
            bm = SkNEW(SkBitmap);
@@ -124,7 +125,7 @@ public:
     }
 
     SkBitmap* getBitmap(unsigned index) {
-        return fBitmaps[index];
+        return fBitmaps[index - 1];
     }
 
     void setSharedHeap(SkBitmapHeap* heap) {
@@ -163,15 +163,6 @@ public:
         if (this->needOpBytes()) {
             this->writeOp(kDone_DrawOp);
             this->doNotify();
-            if (shouldFlattenBitmaps(fFlags)) {
-                // In this case, a BitmapShuttle is reffed by the SharedHeap
-                // and refs this canvas. Unref the SharedHeap to end the
-                // circular reference. When shouldFlattenBitmaps is false,
-                // there is no circular reference, so the SharedHeap can be
-                // safely unreffed in the destructor.
-                fSharedHeap->unref();
-                fSharedHeap = NULL;
-            }
         }
         fDone = true;
     }
@@ -181,6 +172,11 @@ public:
     size_t freeMemoryIfPossible(size_t bytesToFree);
 
     size_t storageAllocatedForRecording() {
+        // FIXME: This can be removed once fSharedHeap is used by cross process
+        // case.
+        if (NULL == fSharedHeap) {
+            return 0;
+        }
         return fSharedHeap->bytesAllocated();
     }
 
@@ -235,11 +231,6 @@ public:
                           const SkPaint&) SK_OVERRIDE;
     virtual void drawData(const void*, size_t) SK_OVERRIDE;
 
-    /**
-     * Flatten an SkBitmap to send to the reader, where it will be referenced
-     * according to slot.
-     */
-    bool shuttleBitmap(const SkBitmap&, int32_t slot);
 private:
     enum {
         kNoSaveLayer = -1,
@@ -252,7 +243,7 @@ private:
     size_t fBlockSize; // amount allocated for writer
     size_t fBytesNotified;
     bool fDone;
-    const uint32_t fFlags;
+    uint32_t fFlags;
 
     SkRefCntSet fTypefaceSet;
 
@@ -282,15 +273,27 @@ private:
     // if a new SkFlatData was added when in cross process mode
     void flattenFactoryNames();
 
+    // These are only used when in cross process, but with no shared address
+    // space, so bitmaps are flattened.
+    FlattenableHeap fBitmapHeap;
+    SkBitmapDictionary fBitmapDictionary;
+    int flattenToIndex(const SkBitmap&);
+
     FlattenableHeap fFlattenableHeap;
     FlatDictionary fFlatDictionary;
     int fCurrFlatIndex[kCount_PaintFlats];
     int flattenToIndex(SkFlattenable* obj, PaintFlats);
 
-    // Common code used by drawBitmap*. Behaves differently depending on the
-    // type of SkBitmapHeap being used, which is determined by the flags used.
-    bool commonDrawBitmap(const SkBitmap& bm, DrawOps op, unsigned flags,
-                          size_t opBytesNeeded, const SkPaint* paint);
+    // Common code used by drawBitmap* when flattening.
+    bool commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op, unsigned flags,
+                                 size_t opBytesNeeded, const SkPaint* paint);
+    // Common code used by drawBitmap* when storing in the heap.
+    bool commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op, unsigned flags,
+                              size_t opBytesNeeded, const SkPaint* paint);
+    // Convenience type for function pointer
+    typedef bool (SkGPipeCanvas::*BitmapCommonFunction)(const SkBitmap&,
+                                                        DrawOps, unsigned,
+                                                        size_t, const SkPaint*);
 
     SkPaint fPaint;
     void writePaint(const SkPaint&);
@@ -318,20 +321,23 @@ void SkGPipeCanvas::flattenFactoryNames() {
     }
 }
 
-bool SkGPipeCanvas::shuttleBitmap(const SkBitmap& bm, int32_t slot) {
+int SkGPipeCanvas::flattenToIndex(const SkBitmap & bitmap) {
     SkASSERT(shouldFlattenBitmaps(fFlags));
-    SkOrderedWriteBuffer buffer(1024);
-    buffer.setNamedFactoryRecorder(fFactorySet);
-    bm.flatten(buffer);
-    this->flattenFactoryNames();
-    uint32_t size = buffer.size();
-    if (this->needOpBytes(size)) {
-        this->writeOp(kDef_Bitmap_DrawOp, 0, slot);
-        void* dst = static_cast<void*>(fWriter.reserve(size));
-        buffer.writeToMemory(dst);
-        return true;
+    uint32_t flags = SkFlattenableWriteBuffer::kCrossProcess_Flag;
+    bool added, replaced;
+    const SkFlatData* flat = fBitmapDictionary.findAndReplace(
+            bitmap, flags, fBitmapHeap.flatToReplace(), &added, &replaced);
+    int index = flat->index();
+    if (added) {
+        this->flattenFactoryNames();
+        size_t flatSize = flat->flatSize();
+        if (this->needOpBytes(flatSize)) {
+            this->writeOp(kDef_Bitmap_DrawOp, 0, index);
+            fWriter.write(flat->data(), flatSize);
+        }
     }
-    return false;
+    return index;
 }
 
 // return 0 for NULL (or unflattenable obj), or index-base-1
@@ -371,24 +377,6 @@ int SkGPipeCanvas::flattenToIndex(SkFlattenable* obj, PaintFlats paintflat) {
 
 ///////////////////////////////////////////////////////////////////////////////
 
-/**
- * If SkBitmaps are to be flattened to send to the reader, this class is
- * provided to the SkBitmapHeap to tell the SkGPipeCanvas to do so.
- */
-class BitmapShuttle : public SkBitmapHeap::ExternalStorage {
-public:
-    BitmapShuttle(SkGPipeCanvas*);
-
-    ~BitmapShuttle();
-
-    virtual bool insert(const SkBitmap& bitmap, int32_t slot) SK_OVERRIDE;
-
-private:
-    SkGPipeCanvas* fCanvas;
-};
-
-///////////////////////////////////////////////////////////////////////////////
-
 #define MIN_BLOCK_SIZE (16 * 1024)
 #define BITMAPS_TO_KEEP 5
 #define FLATTENABLES_TO_KEEP 10
@@ -398,6 +386,8 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
     : fFactorySet(isCrossProcess(flags) ? SkNEW(SkNamedFactorySet) : NULL)
     , fWriter(*writer)
     , fFlags(flags)
+    , fBitmapHeap(BITMAPS_TO_KEEP, fFactorySet)
+    , fBitmapDictionary(&fBitmapHeap)
     , fFlattenableHeap(FLATTENABLES_TO_KEEP, fFactorySet)
     , fFlatDictionary(&fFlattenableHeap) {
     fController = controller;
@@ -421,12 +411,10 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
     }
 
     if (shouldFlattenBitmaps(flags)) {
-        BitmapShuttle* shuttle = SkNEW_ARGS(BitmapShuttle, (this));
-        fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (shuttle, BITMAPS_TO_KEEP));
-        shuttle->unref();
+        // TODO: Use the shared heap for cross process case as well.
+        fSharedHeap = NULL;
     } else {
-        fSharedHeap = SkNEW_ARGS(SkBitmapHeap,
-                                 (BITMAPS_TO_KEEP, controller->numberOfReaders()));
+        fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (5, controller->numberOfReaders()));
         if (this->needOpBytes(sizeof(void*))) {
             this->writeOp(kShareHeap_DrawOp);
             fWriter.writePtr(static_cast<void*>(fSharedHeap));
@@ -438,6 +426,8 @@ SkGPipeCanvas::SkGPipeCanvas(SkGPipeController* controller,
 SkGPipeCanvas::~SkGPipeCanvas() {
     this->finish();
     SkSafeUnref(fFactorySet);
+    // FIXME: This can be changed to unref() once fSharedHeap is used by cross
+    // process case.
     SkSafeUnref(fSharedHeap);
 }
 
@@ -692,10 +682,26 @@ void SkGPipeCanvas::drawPath(const SkPath& path, const SkPaint& paint) {
     }
 }
 
-bool SkGPipeCanvas::commonDrawBitmap(const SkBitmap& bm, DrawOps op,
+bool SkGPipeCanvas::commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op,
                                      unsigned flags,
                                      size_t opBytesNeeded,
                                      const SkPaint* paint) {
+    if (paint != NULL) {
+        flags |= kDrawBitmap_HasPaint_DrawOpsFlag;
+        this->writePaint(*paint);
+    }
+    int bitmapIndex = this->flattenToIndex(bm);
+    if (this->needOpBytes(opBytesNeeded)) {
+        this->writeOp(op, flags, bitmapIndex);
+        return true;
+    }
+    return false;
+}
+
+bool SkGPipeCanvas::commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op,
+                                         unsigned flags,
+                                         size_t opBytesNeeded,
+                                         const SkPaint* paint) {
     int32_t bitmapIndex = fSharedHeap->insert(bm);
     if (SkBitmapHeap::INVALID_SLOT == bitmapIndex) {
         return false;
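The drawBitmap* hunks that follow select between commonDrawBitmapFlatten and commonDrawBitmapHeap through the BitmapCommonFunction pointer-to-member typedef and invoke the choice as (*this.*bitmapCommon)(...). A minimal sketch of that C++ mechanism, with invented class and method names (not the SkGPipeCanvas API):

#include <cstdio>

class Canvas {
public:
    explicit Canvas(bool flatten) : fFlatten(flatten) {}

    // Mirrors the idea of choosing a common drawBitmap helper per canvas mode.
    bool drawSomething(int payload) {
        // Convenience typedef for a pointer to a member function of Canvas.
        typedef bool (Canvas::*CommonFunction)(int);
        CommonFunction common = fFlatten ? &Canvas::commonFlatten
                                         : &Canvas::commonHeap;
        // Equivalent spellings: (this->*common)(payload) or (*this.*common)(payload).
        return (this->*common)(payload);
    }

private:
    bool commonFlatten(int payload) {
        std::printf("flatten path: %d\n", payload);
        return true;
    }
    bool commonHeap(int payload) {
        std::printf("heap path: %d\n", payload);
        return true;
    }

    bool fFlatten;
};

int main() {
    Canvas cross(true), local(false);
    cross.drawSomething(1);   // dispatches to commonFlatten
    local.drawSomething(2);   // dispatches to commonHeap
    return 0;
}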
@@ -716,7 +722,11 @@ void SkGPipeCanvas::drawBitmap(const SkBitmap& bm, SkScalar left, SkScalar top,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = sizeof(SkScalar) * 2;
 
-    if (this->commonDrawBitmap(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
+    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+            &SkGPipeCanvas::commonDrawBitmapFlatten :
+            &SkGPipeCanvas::commonDrawBitmapHeap;
+
+    if ((*this.*bitmapCommon)(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.writeScalar(left);
         fWriter.writeScalar(top);
     }
@@ -734,8 +744,12 @@ void SkGPipeCanvas::drawBitmapRect(const SkBitmap& bm, const SkIRect* src,
     } else {
         flags = 0;
     }
 
-    if (this->commonDrawBitmap(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
+    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+            &SkGPipeCanvas::commonDrawBitmapFlatten :
+            &SkGPipeCanvas::commonDrawBitmapHeap;
+
+    if ((*this.*bitmapCommon)(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
         if (hasSrc) {
             fWriter.write32(src->fLeft);
             fWriter.write32(src->fTop);
@@ -751,7 +765,11 @@ void SkGPipeCanvas::drawBitmapMatrix(const SkBitmap& bm, const SkMatrix& matrix,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = matrix.writeToMemory(NULL);
 
-    if (this->commonDrawBitmap(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
+    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+            &SkGPipeCanvas::commonDrawBitmapFlatten :
+            &SkGPipeCanvas::commonDrawBitmapHeap;
+
+    if ((*this.*bitmapCommon)(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.writeMatrix(matrix);
     }
 }
@@ -761,7 +779,11 @@ void SkGPipeCanvas::drawBitmapNine(const SkBitmap& bm, const SkIRect& center,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = sizeof(int32_t) * 4 + sizeof(SkRect);
 
-    if (this->commonDrawBitmap(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
+    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+            &SkGPipeCanvas::commonDrawBitmapFlatten :
+            &SkGPipeCanvas::commonDrawBitmapHeap;
+
+    if ((*this.*bitmapCommon)(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.write32(center.fLeft);
         fWriter.write32(center.fTop);
         fWriter.write32(center.fRight);
@@ -775,7 +797,11 @@ void SkGPipeCanvas::drawSprite(const SkBitmap& bm, int left, int top,
     NOTIFY_SETUP(this);
     size_t opBytesNeeded = sizeof(int32_t) * 2;
 
-    if (this->commonDrawBitmap(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
+    BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+            &SkGPipeCanvas::commonDrawBitmapFlatten :
+            &SkGPipeCanvas::commonDrawBitmapHeap;
+
+    if ((*this.*bitmapCommon)(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
         fWriter.write32(left);
         fWriter.write32(top);
     }
@@ -934,6 +960,11 @@ void SkGPipeCanvas::flushRecording(bool detachCurrentBlock) {
 }
 
 size_t SkGPipeCanvas::freeMemoryIfPossible(size_t bytesToFree) {
+    // FIXME: This can be removed once fSharedHeap is used by cross process
+    // case.
+    if (NULL == fSharedHeap) {
+        return 0;
+    }
     return fSharedHeap->freeMemoryIfPossible(bytesToFree);
 }
 
@@ -1114,18 +1145,3 @@ size_t SkGPipeWriter::storageAllocatedForRecording() const {
     return NULL == fCanvas ? 0 : fCanvas->storageAllocatedForRecording();
 }
 
-///////////////////////////////////////////////////////////////////////////////
-
-BitmapShuttle::BitmapShuttle(SkGPipeCanvas* canvas) {
-    SkASSERT(canvas != NULL);
-    fCanvas = canvas;
-    fCanvas->ref();
-}
-
-BitmapShuttle::~BitmapShuttle() {
-    fCanvas->unref();
-}
-
-bool BitmapShuttle::insert(const SkBitmap& bitmap, int32_t slot) {
-    return fCanvas->shuttleBitmap(bitmap, slot);
-}