Only store resources in GrResourceCache::fScratchMap that are available to be used as scratch.

Currently, when we create a scratch resource we immediately add it to the
scratch map, and it stays there until the resource is deleted. The one
exception is that adding a unique key removes a resource from the scratch
map. As a result, the scratch map holds resources that can't be returned
from a scratch lookup because they are either already in use by something
else or have been changed to unbudgeted. So every time we do a scratch
lookup, even after finding the list of resources that match a key, we
still have to iterate that list to see whether each resource can actually
be used.

The problem arises when we have lots of resources that all match the same
key (think 1000s of identical buffers). Then the cost of iterating this
list gets very high.
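
To make the cost concrete, here is a minimal sketch of the old behavior
(hypothetical SimpleResource/OldScratchMap types made up for illustration,
not the actual GrResourceCache code): because every resource with a scratch
key stays in the map for its whole lifetime, each lookup has to walk the
full bucket and re-test usability per entry.

// Simplified illustration only; types and fields are invented for this sketch.
#include <cstdint>
#include <unordered_map>
#include <vector>

struct SimpleResource {
    int  fRefCnt   = 0;
    bool fBudgeted = true;
};

struct OldScratchMap {
    // Every resource with a scratch key lives here until it is deleted.
    std::unordered_map<uint32_t, std::vector<SimpleResource*>> fBuckets;

    SimpleResource* find(uint32_t key) {
        // With thousands of identical, in-use buffers under one key, this loop
        // dominates every lookup even though most entries fail the test.
        for (SimpleResource* r : fBuckets[key]) {
            if (r->fRefCnt == 0 && r->fBudgeted) {
                return r;
            }
        }
        return nullptr;
    }
};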

This change makes it so that only resources that can actively be used as
scratch at that moment are stored in the scratch map. When we find a
scratch resource we pull it out of the scratch map, and when that
resource's refs drop back to zero it is added back. The same removal and
re-insertion now also happens when a resource changes to or from budgeted.
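
A sketch of the new invariant, using the same hypothetical types as the
sketch above (again not the real implementation): the usability test moves
to the state-change points, so anything present in the map can be returned
immediately and the lookup no longer has to filter.

struct NewScratchMap {
    std::unordered_map<uint32_t, std::vector<SimpleResource*>> fBuckets;

    // Anything in the map is usable, so take an entry and pull it out of the
    // map while it is handed out to a caller.
    SimpleResource* findAndRef(uint32_t key) {
        auto& bucket = fBuckets[key];
        if (bucket.empty()) {
            return nullptr;
        }
        SimpleResource* r = bucket.back();
        bucket.pop_back();
        r->fRefCnt++;
        return r;
    }

    // Called when the last ref drops or the resource becomes budgeted again;
    // only then does the resource become visible to scratch lookups.
    void addBackIfUsable(uint32_t key, SimpleResource* r) {
        if (r->fRefCnt == 0 && r->fBudgeted) {
            fBuckets[key].push_back(r);
        }
    }
};

With this shape the per-lookup cost no longer depends on how many matching
resources are currently in use; the map maintenance is paid at ref-count
and budget transitions instead.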

Change-Id: I52b415d0e035dfc589f3d712be85799a56827bf0
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/367976
Reviewed-by: Brian Salomon <bsalomon@google.com>
Commit-Queue: Greg Daniel <egdaniel@google.com>
Greg Daniel 2021-02-08 13:37:29 -05:00
parent 49e5b3a339
commit 1a2326363a
6 changed files with 83 additions and 64 deletions

View File

@ -163,7 +163,7 @@ void GrGpuResource::notifyRefCntWillBeZero() const {
mutableThis->willRemoveLastRef();
}
void GrGpuResource::notifyRefCntIsZero() const {
void GrGpuResource::notifyARefCntIsZero(LastRemovedRef removedRef) const {
if (this->wasDestroyed()) {
// We've already been removed from the cache. Goodbye cruel world!
delete this;
@ -172,7 +172,7 @@ void GrGpuResource::notifyRefCntIsZero() const {
GrGpuResource* mutableThis = const_cast<GrGpuResource*>(this);
get_resource_cache(fGpu)->resourceAccess().notifyRefCntReachedZero(mutableThis);
get_resource_cache(fGpu)->resourceAccess().notifyARefCntReachedZero(mutableThis, removedRef);
}
void GrGpuResource::removeScratchKey() {

View File

@ -21,9 +21,9 @@ class SkTraceMemoryDump;
* Separated out as a base class to isolate the ref-cnting behavior and provide friendship without
* exposing all of GrGpuResource.
*
* PRIOR to the last ref being removed DERIVED::notifyRefCntWillBeZero() will be called
* PRIOR to the last ref being removed DERIVED::notifyARefCntWillBeZero() will be called
* (static poly morphism using CRTP). It is legal for additional ref's to be added
* during this time. AFTER the ref count reaches zero DERIVED::notifyRefCntIsZero() will be
* during this time. AFTER the ref count reaches zero DERIVED::notifyARefCntIsZero() will be
* called.
*/
template <typename DERIVED> class GrIORef : public SkNoncopyable {
@ -37,11 +37,16 @@ public:
(void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
}
// This enum is used to notify the GrResourceCache which type of ref just dropped to zero.
enum class LastRemovedRef {
kMainRef, // This refers to fRefCnt
kCommandBufferUsage, // This refers to fCommandBufferUsageCnt
};
void unref() const {
SkASSERT(this->getRefCnt() > 0);
if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel) &&
this->hasNoCommandBufferUsages()) {
this->notifyWillBeZero();
if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
this->notifyWillBeZero(LastRemovedRef::kMainRef);
}
}
@ -52,9 +57,8 @@ public:
void removeCommandBufferUsage() const {
SkASSERT(!this->hasNoCommandBufferUsages());
if (1 == fCommandBufferUsageCnt.fetch_add(-1, std::memory_order_acq_rel) &&
0 == this->getRefCnt()) {
this->notifyWillBeZero();
if (1 == fCommandBufferUsageCnt.fetch_add(-1, std::memory_order_acq_rel)) {
this->notifyWillBeZero(LastRemovedRef::kCommandBufferUsage);
}
}
@ -63,8 +67,6 @@ public:
#endif
protected:
friend class GrResourceCache; // for internalHasRef
GrIORef() : fRefCnt(1), fCommandBufferUsageCnt(0) {}
bool internalHasRef() const { return SkToBool(this->getRefCnt()); }
@ -80,18 +82,22 @@ protected:
}
private:
void notifyWillBeZero() const {
// At this point we better be the only thread accessing this resource.
// Trick out the notifyRefCntWillBeZero() call by adding back one more ref.
fRefCnt.fetch_add(+1, std::memory_order_relaxed);
static_cast<const DERIVED*>(this)->notifyRefCntWillBeZero();
// notifyRefCntWillBeZero() could have done anything, including re-refing this and
// passing on to another thread. Take away the ref-count we re-added above and see
// if we're back to zero.
// TODO: Consider making it so that refs can't be added and merge
// notifyRefCntWillBeZero()/willRemoveLastRef() with notifyRefCntIsZero().
if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
static_cast<const DERIVED*>(this)->notifyRefCntIsZero();
void notifyWillBeZero(LastRemovedRef removedRef) const {
if (0 == this->getRefCnt() && this->hasNoCommandBufferUsages()) {
// At this point we better be the only thread accessing this resource.
// Trick out the notifyRefCntWillBeZero() call by adding back one more ref.
fRefCnt.fetch_add(+1, std::memory_order_relaxed);
static_cast<const DERIVED*>(this)->notifyRefCntWillBeZero();
// notifyRefCntWillBeZero() could have done anything, including re-refing this and
// passing on to another thread. Take away the ref-count we re-added above and see
// if we're back to zero.
// TODO: Consider making it so that refs can't be added and merge
// notifyRefCntWillBeZero()/willRemoveLastRef() with notifyARefCntIsZero().
if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
static_cast<const DERIVED*>(this)->notifyARefCntIsZero(removedRef);
}
} else {
static_cast<const DERIVED*>(this)->notifyARefCntIsZero(removedRef);
}
}
@ -297,7 +303,7 @@ private:
void setUniqueKey(const GrUniqueKey&);
void removeUniqueKey();
void notifyRefCntWillBeZero() const;
void notifyRefCntIsZero() const;
void notifyARefCntIsZero(LastRemovedRef removedRef) const;
void removeScratchKey();
void makeBudgeted();
void makeUnbudgeted();
@ -328,7 +334,8 @@ private:
const UniqueID fUniqueID;
using INHERITED = GrIORef<GrGpuResource>;
friend class GrIORef<GrGpuResource>; // to access notifyRefCntWillBeZero and notifyRefCntIsZero.
friend class GrIORef<GrGpuResource>; // to access notifyRefCntWillBeZero and
// notifyARefCntIsZero.
};
class GrGpuResource::ProxyAccess {
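
For context on the GrIORef change above, here is a stripped-down
illustration of the CRTP notification pattern it modifies (hypothetical
RefBase/SimpleRes names, not the real Skia classes): the base class owns
the counter and reports which kind of ref hit zero through a
LastRemovedRef enum, leaving the cache-side decision to the derived type.

#include <atomic>

template <typename DERIVED>
class RefBase {
public:
    enum class LastRemovedRef { kMainRef, kCommandBufferUsage };

    void ref() const { fRefCnt.fetch_add(+1, std::memory_order_relaxed); }

    void unref() const {
        // fetch_add returns the previous value, so 1 means this call dropped it to 0.
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            static_cast<const DERIVED*>(this)->notifyARefCntIsZero(
                    LastRemovedRef::kMainRef);
        }
    }

protected:
    mutable std::atomic<int> fRefCnt{1};
};

class SimpleRes : public RefBase<SimpleRes> {
private:
    friend class RefBase<SimpleRes>;  // lets the base call the private notification
    void notifyARefCntIsZero(LastRemovedRef removedRef) const {
        // A cache would re-insert this resource into its scratch map here when
        // removedRef == LastRemovedRef::kMainRef and nothing else is using it.
        (void)removedRef;
    }
};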

View File

@ -32,6 +32,10 @@ private:
GrBudgetedType::kBudgeted == fResource->resourcePriv().budgetedType();
}
bool isUsableAsScratch() const {
return this->isScratch() && !fResource->internalHasRef();
}
/**
* Called by the cache to delete the resource under normal circumstances.
*/

View File

@ -155,12 +155,7 @@ void GrResourceCache::insertResource(GrGpuResource* resource) {
fBudgetedHighWaterBytes = std::max(fBudgetedBytes, fBudgetedHighWaterBytes);
#endif
}
if (resource->resourcePriv().getScratchKey().isValid() &&
!resource->getUniqueKey().isValid()) {
SkASSERT(!resource->resourcePriv().refsWrappedObjects());
fScratchMap.insert(resource->resourcePriv().getScratchKey(), resource);
}
SkASSERT(!resource->cacheAccess().isUsableAsScratch());
this->purgeAsNeeded();
}
@ -186,8 +181,7 @@ void GrResourceCache::removeResource(GrGpuResource* resource) {
fBudgetedBytes, "free", fMaxBytes - fBudgetedBytes);
}
if (resource->resourcePriv().getScratchKey().isValid() &&
!resource->getUniqueKey().isValid()) {
if (resource->cacheAccess().isUsableAsScratch()) {
fScratchMap.remove(resource->resourcePriv().getScratchKey(), resource);
}
if (resource->getUniqueKey().isValid()) {
@ -285,13 +279,8 @@ public:
AvailableForScratchUse() { }
bool operator()(const GrGpuResource* resource) const {
SkASSERT(!resource->getUniqueKey().isValid() &&
resource->resourcePriv().getScratchKey().isValid());
// isScratch() also tests that the resource is budgeted.
if (resource->internalHasRef() || !resource->cacheAccess().isScratch()) {
return false;
}
// Everything that is in the scratch map should be usable as a
// scratch resource.
return true;
}
};
@ -301,6 +290,7 @@ GrGpuResource* GrResourceCache::findAndRefScratchResource(const GrScratchKey& sc
GrGpuResource* resource = fScratchMap.find(scratchKey, AvailableForScratchUse());
if (resource) {
fScratchMap.remove(scratchKey, resource);
this->refAndMakeResourceMRU(resource);
this->validate();
}
@ -310,7 +300,7 @@ GrGpuResource* GrResourceCache::findAndRefScratchResource(const GrScratchKey& sc
void GrResourceCache::willRemoveScratchKey(const GrGpuResource* resource) {
ASSERT_SINGLE_OWNER
SkASSERT(resource->resourcePriv().getScratchKey().isValid());
if (!resource->getUniqueKey().isValid()) {
if (resource->cacheAccess().isUsableAsScratch()) {
fScratchMap.remove(resource->resourcePriv().getScratchKey(), resource);
}
}
@ -324,7 +314,7 @@ void GrResourceCache::removeUniqueKey(GrGpuResource* resource) {
fUniqueHash.remove(resource->getUniqueKey());
}
resource->cacheAccess().removeUniqueKey();
if (resource->resourcePriv().getScratchKey().isValid()) {
if (resource->cacheAccess().isUsableAsScratch()) {
fScratchMap.insert(resource->resourcePriv().getScratchKey(), resource);
}
@ -361,8 +351,9 @@ void GrResourceCache::changeUniqueKey(GrGpuResource* resource, const GrUniqueKey
SkASSERT(nullptr == fUniqueHash.find(resource->getUniqueKey()));
} else {
// 'resource' didn't have a valid unique key before so it is switching sides. Remove it
// from the ScratchMap
if (resource->resourcePriv().getScratchKey().isValid()) {
// from the ScratchMap. The isUsableAsScratch call depends on us not adding the new
// unique key until after this check.
if (resource->cacheAccess().isUsableAsScratch()) {
fScratchMap.remove(resource->resourcePriv().getScratchKey(), resource);
}
}
@ -397,7 +388,8 @@ void GrResourceCache::refAndMakeResourceMRU(GrGpuResource* resource) {
this->validate();
}
void GrResourceCache::notifyRefCntReachedZero(GrGpuResource* resource) {
void GrResourceCache::notifyARefCntReachedZero(GrGpuResource* resource,
GrGpuResource::LastRemovedRef removedRef) {
ASSERT_SINGLE_OWNER
SkASSERT(resource);
SkASSERT(!resource->wasDestroyed());
@ -406,6 +398,17 @@ void GrResourceCache::notifyRefCntReachedZero(GrGpuResource* resource) {
// will be moved to the queue if it is newly purgeable.
SkASSERT(fNonpurgeableResources[*resource->cacheAccess().accessCacheIndex()] == resource);
if (removedRef == GrGpuResource::LastRemovedRef::kMainRef) {
if (resource->cacheAccess().isUsableAsScratch()) {
fScratchMap.insert(resource->resourcePriv().getScratchKey(), resource);
}
}
if (resource->cacheAccess().hasRefOrCommandBufferUsage()) {
this->validate();
return;
}
#ifdef SK_DEBUG
// When the timestamp overflows validate() is called. validate() checks that resources in
// the nonpurgeable array are indeed not purgeable. However, the movement from the array to
@ -489,6 +492,9 @@ void GrResourceCache::didChangeBudgetStatus(GrGpuResource* resource) {
!resource->cacheAccess().hasRefOrCommandBufferUsage()) {
++fNumBudgetedResourcesFlushWillMakePurgeable;
}
if (resource->cacheAccess().isUsableAsScratch()) {
fScratchMap.insert(resource->resourcePriv().getScratchKey(), resource);
}
this->purgeAsNeeded();
} else {
SkASSERT(resource->resourcePriv().budgetedType() != GrBudgetedType::kUnbudgetedCacheable);
@ -498,6 +504,10 @@ void GrResourceCache::didChangeBudgetStatus(GrGpuResource* resource) {
!resource->cacheAccess().hasRefOrCommandBufferUsage()) {
--fNumBudgetedResourcesFlushWillMakePurgeable;
}
if (!resource->cacheAccess().hasRef() && !resource->getUniqueKey().isValid() &&
resource->resourcePriv().getScratchKey().isValid()) {
fScratchMap.remove(resource->resourcePriv().getScratchKey(), resource);
}
}
SkASSERT(wasPurgeable == resource->resourcePriv().isPurgeable());
TRACE_COUNTER2("skia.gpu.cache", "skia budget", "used",
@ -857,29 +867,24 @@ void GrResourceCache::validate() const {
const GrScratchKey& scratchKey = resource->resourcePriv().getScratchKey();
const GrUniqueKey& uniqueKey = resource->getUniqueKey();
if (resource->cacheAccess().isScratch()) {
if (resource->cacheAccess().isUsableAsScratch()) {
SkASSERT(!uniqueKey.isValid());
SkASSERT(GrBudgetedType::kBudgeted == resource->resourcePriv().budgetedType());
SkASSERT(!resource->cacheAccess().hasRef());
++fScratch;
SkASSERT(fScratchMap->countForKey(scratchKey));
SkASSERT(!resource->resourcePriv().refsWrappedObjects());
} else if (scratchKey.isValid()) {
SkASSERT(GrBudgetedType::kBudgeted != resource->resourcePriv().budgetedType() ||
uniqueKey.isValid());
if (!uniqueKey.isValid()) {
++fCouldBeScratch;
SkASSERT(fScratchMap->countForKey(scratchKey));
}
uniqueKey.isValid() || resource->cacheAccess().hasRef());
SkASSERT(!resource->resourcePriv().refsWrappedObjects());
SkASSERT(!fScratchMap->has(resource, scratchKey));
}
if (uniqueKey.isValid()) {
++fContent;
SkASSERT(fUniqueHash->find(uniqueKey) == resource);
SkASSERT(GrBudgetedType::kBudgeted == resource->resourcePriv().budgetedType() ||
resource->resourcePriv().refsWrappedObjects());
if (scratchKey.isValid()) {
SkASSERT(!fScratchMap->has(resource, scratchKey));
}
}
if (GrBudgetedType::kBudgeted == resource->resourcePriv().budgetedType()) {
@ -892,8 +897,7 @@ void GrResourceCache::validate() const {
{
int count = 0;
fScratchMap.foreach([&](const GrGpuResource& resource) {
SkASSERT(resource.resourcePriv().getScratchKey().isValid());
SkASSERT(!resource.getUniqueKey().isValid());
SkASSERT(resource.cacheAccess().isUsableAsScratch());
count++;
});
SkASSERT(count == fScratchMap.count());
@ -941,7 +945,7 @@ void GrResourceCache::validate() const {
SkASSERT(fBudgetedCount <= fBudgetedHighWaterCount);
#endif
SkASSERT(stats.fContent == fUniqueHash.count());
SkASSERT(stats.fScratch + stats.fCouldBeScratch == fScratchMap.count());
SkASSERT(stats.fScratch == fScratchMap.count());
// This assertion is not currently valid because we can be in recursive notifyCntReachedZero()
// calls. This will be fixed when subresource registration is explicit.

View File

@ -249,7 +249,7 @@ private:
////
void insertResource(GrGpuResource*);
void removeResource(GrGpuResource*);
void notifyRefCntReachedZero(GrGpuResource*);
void notifyARefCntReachedZero(GrGpuResource*, GrGpuResource::LastRemovedRef);
void changeUniqueKey(GrGpuResource*, const GrUniqueKey&);
void removeUniqueKey(GrGpuResource*);
void willRemoveScratchKey(const GrGpuResource*);
@ -406,10 +406,12 @@ private:
kRefCntReachedZero_RefNotificationFlag = 0x2,
};
/**
* Called by GrGpuResources when they detect that their ref cnt has reached zero.
* Called by GrGpuResources when they detect one of their ref cnts have reached zero. This may
* either be the main ref or the command buffer usage ref.
*/
void notifyRefCntReachedZero(GrGpuResource* resource) {
fCache->notifyRefCntReachedZero(resource);
void notifyARefCntReachedZero(GrGpuResource* resource,
GrGpuResource::LastRemovedRef removedRef) {
fCache->notifyARefCntReachedZero(resource, removedRef);
}
/**

View File

@ -817,7 +817,8 @@ static void test_duplicate_scratch_key(skiatest::Reporter* reporter) {
// Scratch resources are registered with GrResourceCache just by existing. There are 2.
REPORTER_ASSERT(reporter, 2 == TestResource::NumAlive());
SkDEBUGCODE(REPORTER_ASSERT(reporter, 2 == cache->countScratchEntriesForKey(scratchKey));)
// As long as there are outstanding refs on the resources they will not be in the scratch map
SkDEBUGCODE(REPORTER_ASSERT(reporter, 0 == cache->countScratchEntriesForKey(scratchKey));)
REPORTER_ASSERT(reporter, 2 == cache->getResourceCount());
REPORTER_ASSERT(reporter, a->gpuMemorySize() + b->gpuMemorySize() ==
cache->getResourceBytes());
@ -831,6 +832,7 @@ static void test_duplicate_scratch_key(skiatest::Reporter* reporter) {
a->unref();
b->unref();
REPORTER_ASSERT(reporter, 2 == TestResource::NumAlive());
// Since we removed the refs to the resources they will now be in the scratch map
SkDEBUGCODE(REPORTER_ASSERT(reporter, 2 == cache->countScratchEntriesForKey(scratchKey));)
// Purge again. This time resources should be purgeable.