Make the intervals in GrResourceAllocator use SkArenaAlloc

Change-Id: I3190396fe34c01c232654fcb225dbf76df3137b4
Reviewed-on: https://skia-review.googlesource.com/66463
Reviewed-by: Brian Salomon <bsalomon@google.com>
Commit-Queue: Robert Phillips <robertphillips@google.com>
Robert Phillips 2017-11-01 17:32:39 -04:00 committed by Skia Commit-Bot
parent ce54bcecc2
commit 8186cbee6c
5 changed files with 51 additions and 18 deletions
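For context on the technique: each Interval used to be heap-allocated with new and torn down with delete. The patch below instead draws Intervals from a small SkArenaAlloc seeded with inline storage and recycles retired ones through an intrusive free list, releasing everything in one shot when the allocator is destroyed. The following is a minimal, self-contained sketch of that allocation pattern, using hypothetical Node/NodePool names rather than the real Skia classes:

#include <cstddef>
#include <cstdio>
#include <memory>
#include <new>
#include <vector>

struct Node {
    unsigned fStart, fEnd;
    Node*    fNext = nullptr;
    Node(unsigned start, unsigned end) : fStart(start), fEnd(end) {}
};

class NodePool {
public:
    // Hand out a Node, preferring a recycled one over fresh arena space.
    Node* make(unsigned start, unsigned end) {
        if (fFreeList) {
            Node* n = fFreeList;
            fFreeList = n->fNext;
            n->fStart = start;          // fully re-initialize the recycled node
            n->fEnd = end;
            n->fNext = nullptr;
            return n;
        }
        return new (this->allocate()) Node(start, end);
    }

    // Retire a Node: no delete, just push it onto the intrusive free list.
    void recycle(Node* n) {
        n->fNext = fFreeList;
        fFreeList = n;
    }

private:
    void* allocate() {
        if (fUsed < kInline) {          // the first few nodes live in inline storage
            return fInline + sizeof(Node) * fUsed++;
        }
        fOverflow.push_back(std::make_unique<unsigned char[]>(sizeof(Node)));
        return fOverflow.back().get();  // later nodes spill to heap blocks
    }

    static constexpr std::size_t kInline = 12;   // mirrors the "12 Intervals" sizing
    alignas(Node) unsigned char fInline[kInline * sizeof(Node)];
    std::size_t fUsed = 0;
    std::vector<std::unique_ptr<unsigned char[]>> fOverflow;
    Node* fFreeList = nullptr;
};

int main() {
    NodePool pool;
    Node* a = pool.make(0, 3);
    pool.recycle(a);                    // a goes onto the free list...
    Node* b = pool.make(1, 5);          // ...and is handed back out here
    std::printf("reused: %s\n", a == b ? "yes" : "no");
    return 0;
}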

GrDrawingManager.cpp

@@ -171,12 +171,12 @@ GrSemaphoresSubmitted GrDrawingManager::internalFlush(GrSurfaceProxy*,
    }
#endif

#ifdef MDB_ALLOC_RESOURCES
    GrResourceAllocator alloc(fContext->resourceProvider());
    for (int i = 0; i < fOpLists.count(); ++i) {
        fOpLists[i]->gatherProxyIntervals(&alloc);
    }

#ifdef MDB_ALLOC_RESOURCES
    alloc.assign();
#endif

GrRenderTargetOpList.cpp

@@ -257,6 +257,10 @@ bool GrRenderTargetOpList::copySurface(const GrCaps& caps,
}

void GrRenderTargetOpList::gatherProxyIntervals(GrResourceAllocator* alloc) const {
    if (!fRecordedOps.count()) {
        return;
    }

    unsigned int cur = alloc->numOps();

    // Add the interval for all the writes to this opList's target
@@ -266,14 +270,14 @@ void GrRenderTargetOpList::gatherProxyIntervals(GrResourceAllocator* alloc) const {
        alloc->addInterval(p);
    };
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        SkASSERT(alloc->curOp() == cur+i);
        const GrOp* op = fRecordedOps[i].fOp.get(); // only diff from the GrTextureOpList version
        if (op) {
            op->visitProxies(gather);
            alloc->incOps();
        }
    }
}

static inline bool can_reorder(const SkRect& a, const SkRect& b) { return !GrRectsOverlap(a, b); }
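For context, the gather step above numbers each recorded op and calls addInterval() for every proxy the op touches, so a proxy's usage interval ends up running from the first op that uses it to the last. A rough, self-contained illustration of that mapping (hypothetical proxy names, not the Skia API):

#include <cstdio>
#include <map>
#include <string>
#include <utility>
#include <vector>

int main() {
    // op index -> the proxies that op reads or writes, in recording order
    std::vector<std::vector<std::string>> ops = {
        {"target", "texA"},           // op 0
        {"target"},                   // op 1
        {"target", "texA", "texB"},   // op 2
    };

    // proxy -> [first op that touches it, last op that touches it]
    std::map<std::string, std::pair<unsigned, unsigned>> intervals;
    for (unsigned i = 0; i < ops.size(); ++i) {
        for (const std::string& p : ops[i]) {
            auto it = intervals.find(p);
            if (it == intervals.end()) {
                intervals.emplace(p, std::make_pair(i, i));  // first use opens the interval
            } else {
                it->second.second = i;                       // later uses only push the end out
            }
        }
    }

    for (const auto& entry : intervals) {
        std::printf("%s: [%u, %u]\n", entry.first.c_str(),
                    entry.second.first, entry.second.second);
    }
    // Prints: target: [0, 2], texA: [0, 2], texB: [2, 2]
    return 0;
}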

GrResourceAllocator.cpp

@@ -17,13 +17,19 @@ void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy,
    if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
        // Revise the interval for an existing use
        SkASSERT(intvl->fEnd < start);
        //SkASSERT(intvl->fEnd <= end);
        intvl->fEnd = end;
        return;
    }

    // TODO: given the usage pattern an arena allocation scheme would work well here
    Interval* newIntvl = new Interval(proxy, start, end);
    Interval* newIntvl;
    if (fFreeIntervalList) {
        newIntvl = fFreeIntervalList;
        fFreeIntervalList = newIntvl->fNext;
        newIntvl->resetTo(proxy, start, end);
    } else {
        newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
    }

    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.add(newIntvl);
@@ -109,7 +115,10 @@ void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->fEnd < curIndex) {
        Interval* temp = fActiveIntvls.popHead();
        this->freeUpSurface(temp->fProxy->priv().peekSurface());
        delete temp;

        // Add temp to the free interval list so it can be reused
        temp->fNext = fFreeIntervalList;
        fFreeIntervalList = temp;
    }
}
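For context, expire() now retires an Interval by pushing it onto fFreeIntervalList instead of deleting it, and addInterval() fully re-initializes a popped node via resetTo() before reuse; arena-owned nodes must never be deleted individually. A tiny sketch of that recycle path, with stand-in types rather than the Skia ones:

#include <cassert>

struct Interval {
    unsigned  fStart = 0, fEnd = 0;
    Interval* fNext = nullptr;

    void resetTo(unsigned start, unsigned end) {
        fStart = start;
        fEnd = end;
        fNext = nullptr;   // clear the free-list link so stale state cannot leak out
    }
};

int main() {
    Interval node;                    // stands in for an arena-owned Interval
    Interval* freeList = nullptr;

    // expire(): retire the interval by pushing it onto the free list (no delete).
    node.fNext = freeList;
    freeList = &node;

    // addInterval(): prefer the free list over carving a new node from the arena.
    Interval* reused = freeList;
    freeList = reused->fNext;
    reused->resetTo(4, 7);

    assert(reused == &node && reused->fEnd == 7);
    return 0;
}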

GrResourceAllocator.h

@@ -11,6 +11,8 @@
#include "GrGpuResourcePriv.h"
#include "GrSurface.h"
#include "GrSurfaceProxy.h"
#include "SkArenaAlloc.h"
#include "SkTDynamicHash.h"
#include "SkTMultiMap.h"
@@ -86,6 +88,16 @@ private:
            SkASSERT(proxy);
        }

        void resetTo(GrSurfaceProxy* proxy, unsigned int start, unsigned int end) {
            SkASSERT(proxy);

            fProxy = proxy;
            fProxyID = proxy->uniqueID().asUInt();
            fStart = start;
            fEnd = end;
            fNext = nullptr;
        }

        // for SkTDynamicHash
        static const uint32_t& GetKey(const Interval& intvl) {
            return intvl.fProxyID;
@@ -103,11 +115,8 @@ private:
    public:
        IntervalList() = default;
        ~IntervalList() {
            while (fHead) {
                Interval* temp = fHead;
                fHead = temp->fNext;
                delete temp;
            }
            // The only time we delete an IntervalList is in the GrResourceAllocator dtor.
            // Since the arena allocator will clean up for us we don't bother here.
        }

        bool empty() const { return !SkToBool(fHead); }
@@ -120,6 +129,9 @@ private:
        Interval* fHead = nullptr;
    };

    // Gathered statistics indicate that 99% of flushes will be covered by <= 12 Intervals
    static const int kInitialArenaSize = 12 * sizeof(Interval);

    GrResourceProvider* fResourceProvider;
    FreePoolMultiMap fFreePool; // Recently created/used GrSurfaces
    IntvlHash fIntvlHash; // All the intervals, hashed by proxyID
@@ -129,6 +141,10 @@ private:
                                // (sorted by increasing end)
    unsigned int fNumOps = 0;
    SkDEBUGCODE(bool fAssigned = false;)

    char fStorage[kInitialArenaSize];
    SkArenaAlloc fIntervalAllocator { fStorage, kInitialArenaSize, 0 };
    Interval* fFreeIntervalList = nullptr;
};

#endif // GrResourceAllocator_DEFINED
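For context, kInitialArenaSize sizes the inline fStorage buffer for the common case (<= 12 Intervals per flush), and fIntervalAllocator only falls back to heap blocks beyond that; nothing is freed per-Interval, which is why the IntervalList destructor above is now empty. A rough standard-library analogue of that layout, using std::pmr::monotonic_buffer_resource rather than SkArenaAlloc, and an assumed plain-old-data Interval:

#include <cstddef>
#include <memory_resource>
#include <new>

struct Interval {
    void*     fProxy;
    unsigned  fStart, fEnd;
    Interval* fNext;
};

int main() {
    // Size the inline buffer for the common case, like kInitialArenaSize above.
    std::byte buffer[12 * sizeof(Interval)];
    std::pmr::monotonic_buffer_resource arena(buffer, sizeof(buffer));

    Interval* intervals[16] = {};
    for (unsigned i = 0; i < 16; ++i) {
        // Early allocations bump a pointer within the inline buffer; once it is
        // exhausted the resource transparently fetches heap blocks upstream.
        void* mem = arena.allocate(sizeof(Interval), alignof(Interval));
        intervals[i] = new (mem) Interval{nullptr, i, i + 1, nullptr};
    }
    (void)intervals;

    // No per-object deallocation happens: all memory is released when `arena`
    // goes out of scope, which is the property the empty ~IntervalList relies on.
    return 0;
}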

GrTextureOpList.cpp

@@ -124,6 +124,10 @@ bool GrTextureOpList::copySurface(const GrCaps& caps,
}

void GrTextureOpList::gatherProxyIntervals(GrResourceAllocator* alloc) const {
    if (!fRecordedOps.count()) {
        return;
    }

    unsigned int cur = alloc->numOps();

    // Add the interval for all the writes to this opList's target
@@ -133,14 +137,14 @@ void GrTextureOpList::gatherProxyIntervals(GrResourceAllocator* alloc) const {
        alloc->addInterval(p);
    };
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        SkASSERT(alloc->curOp() == cur+i);
        const GrOp* op = fRecordedOps[i].get(); // only diff from the GrRenderTargetOpList version
        if (op) {
            op->visitProxies(gather);
            alloc->incOps();
        }
    }
}

void GrTextureOpList::recordOp(std::unique_ptr<GrOp> op) {
    SkASSERT(fTarget.get());