Revert "Add notion of active opList to GrDrawingManager"
This reverts commit 328f9c72ee.
Reason for revert: an assert is breaking the Chrome roll
Original change's description:
> Add notion of active opList to GrDrawingManager
>
> With an incrementally sorted list of opLists we will no longer be able to
> assume that the last opList is the active one.
>
> This is pulled out of: https://skia-review.googlesource.com/c/skia/+/150614 (Begin centralizing opList DAG) and also contains some of the non-substantive changes from that CL.
>
> Change-Id: Ic3d264565afa61b19e2125c38a6ad053292bc7d9
> Reviewed-on: https://skia-review.googlesource.com/151021
> Reviewed-by: Brian Salomon <bsalomon@google.com>
> Commit-Queue: Robert Phillips <robertphillips@google.com>
TBR=bsalomon@google.com,robertphillips@google.com
Change-Id: I01647301a59e21da8dd1ad2aa4652cfc52b095ea
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://skia-review.googlesource.com/151369
Reviewed-by: Robert Phillips <robertphillips@google.com>
Commit-Queue: Robert Phillips <robertphillips@google.com>
parent cca8c6fa47
commit 936e33a346
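For context on the reverted change: it tracked the one opList that is still accepting commands in an explicit fActiveOpList pointer instead of assuming it is always fOpLists.back(), and it added a debug-only validate() that enforced that invariant. Below is a minimal, self-contained C++ sketch of that bookkeeping; the OpList and DrawingManager types here are simplified stand-ins built on std:: containers, an illustration of the pattern visible in the diff that follows, not Skia code.

// Minimal sketch (not Skia API): explicit "active opList" bookkeeping vs.
// assuming the open opList is always the last one in the list.
#include <cassert>
#include <memory>
#include <vector>

struct OpList {
    bool closed = false;
    void makeClosed() { closed = true; }
    bool isClosed() const { return closed; }
};

struct DrawingManager {
    std::vector<std::shared_ptr<OpList>> fOpLists;
    OpList* fActiveOpList = nullptr;  // what the reverted change tracked explicitly

    std::shared_ptr<OpList> newOpList() {
        // Pre-change (and post-revert) assumption: the open opList is fOpLists.back().
        if (!fOpLists.empty()) {
            fOpLists.back()->makeClosed();
        }
        auto opList = std::make_shared<OpList>();
        fOpLists.push_back(opList);
        fActiveOpList = opList.get();  // the reverted change's extra bookkeeping
        return opList;
    }

    // Rough analogue of the removed GrDrawingManager::validate(): every opList
    // except the active one must be closed, and the active one is still expected
    // to be fOpLists.back() -- the assumption the incremental-sorting work was
    // meant to eventually relax.
    void validate() const {
        if (fActiveOpList) {
            assert(!fOpLists.empty());
            assert(!fActiveOpList->isClosed());
            assert(fActiveOpList == fOpLists.back().get());
        }
        for (const auto& opList : fOpLists) {
            if (opList.get() != fActiveOpList) {
                assert(opList->isClosed());
            }
        }
    }
};

int main() {
    DrawingManager dm;
    auto a = dm.newOpList();
    auto b = dm.newOpList();  // closes 'a'; 'b' becomes the active opList
    dm.validate();            // holds while back() is still the active opList
    assert(a->isClosed() && !b->isClosed());
    return 0;
}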
src/gpu/GrDrawingManager.cpp
@@ -103,7 +103,7 @@ void GrDrawingManager::freeGpuResources() {
     fSoftwarePathRenderer = nullptr;
 }
 
-static void end_oplist_flush_if_not_unique(GrOpList* opList) {
+static void end_oplist_flush_if_not_unique(const sk_sp<GrOpList>& opList) {
     if (!opList->unique()) {
         // TODO: Eventually this should be guaranteed unique: http://skbug.com/7111
         opList->endFlush();
@@ -120,22 +120,19 @@ GrSemaphoresSubmitted GrDrawingManager::internalFlush(GrSurfaceProxy*,
     if (fFlushing || this->wasAbandoned()) {
         return GrSemaphoresSubmitted::kNo;
     }
-    SkDEBUGCODE(this->validate());
 
     GrGpu* gpu = fContext->contextPriv().getGpu();
     if (!gpu) {
         return GrSemaphoresSubmitted::kNo; // Can't flush while DDL recording
     }
     fFlushing = true;
-
-    // Semi-usually the GrOpLists are already closed at this point, but sometimes Ganesh
-    // needs to flush mid-draw. In that case, the SkGpuDevice's GrOpLists won't be closed
-    // but need to be flushed anyway. Closing such GrOpLists here will mean new
-    // GrOpLists will be created to replace them if the SkGpuDevice(s) write to them again.
     for (int i = 0; i < fOpLists.count(); ++i) {
+        // Semi-usually the GrOpLists are already closed at this point, but sometimes Ganesh
+        // needs to flush mid-draw. In that case, the SkGpuDevice's GrOpLists won't be closed
+        // but need to be flushed anyway. Closing such GrOpLists here will mean new
+        // GrOpLists will be created to replace them if the SkGpuDevice(s) write to them again.
         fOpLists[i]->makeClosed(*fContext->contextPriv().caps());
     }
-    fActiveOpList = nullptr;
 
     if (fSortRenderTargets) {
         SkDEBUGCODE(bool result =) SkTTopoSort<GrOpList, GrOpList::TopoSortTraits>(&fOpLists);
@@ -222,7 +219,7 @@ GrSemaphoresSubmitted GrDrawingManager::internalFlush(GrSurfaceProxy*,
     for (int i = startIndex; i < stopIndex; ++i) {
         if (fOpLists[i] && !fOpLists[i]->isFullyInstantiated()) {
             // If the backing surface wasn't allocated drop the entire opList.
-            end_oplist_flush_if_not_unique(fOpLists[i].get()); // http://skbug.com/7111
+            end_oplist_flush_if_not_unique(fOpLists[i]); // http://skbug.com/7111
             fOpLists[i] = nullptr;
         }
         if (fOpLists[i]) {
@@ -298,14 +295,14 @@ bool GrDrawingManager::executeOpLists(int startIndex, int stopIndex, GrOpFlushSt
         if (resourceProvider->explicitlyAllocateGPUResources()) {
             if (!fOpLists[i]->isFullyInstantiated()) {
                 // If the backing surface wasn't allocated drop the draw of the entire opList.
-                end_oplist_flush_if_not_unique(fOpLists[i].get()); // http://skbug.com/7111
+                end_oplist_flush_if_not_unique(fOpLists[i]); // http://skbug.com/7111
                 fOpLists[i] = nullptr;
                 continue;
             }
         } else {
             if (!fOpLists[i]->instantiate(resourceProvider)) {
                 SkDebugf("OpList failed to instantiate.\n");
-                end_oplist_flush_if_not_unique(fOpLists[i].get()); // http://skbug.com/7111
+                end_oplist_flush_if_not_unique(fOpLists[i]); // http://skbug.com/7111
                 fOpLists[i] = nullptr;
                 continue;
             }
@@ -353,7 +350,7 @@ bool GrDrawingManager::executeOpLists(int startIndex, int stopIndex, GrOpFlushSt
         if (!fOpLists[i]) {
             continue;
         }
-        end_oplist_flush_if_not_unique(fOpLists[i].get()); // http://skbug.com/7111
+        end_oplist_flush_if_not_unique(fOpLists[i]); // http://skbug.com/7111
         fOpLists[i] = nullptr;
     }
 
@@ -365,7 +362,6 @@ GrSemaphoresSubmitted GrDrawingManager::prepareSurfaceForExternalIO(
     if (this->wasAbandoned()) {
        return GrSemaphoresSubmitted::kNo;
     }
-    SkDEBUGCODE(this->validate());
     SkASSERT(proxy);
 
     GrGpu* gpu = fContext->contextPriv().getGpu();
@@ -392,8 +388,6 @@ GrSemaphoresSubmitted GrDrawingManager::prepareSurfaceForExternalIO(
             gpu->regenerateMipMapLevels(tex);
         }
     }
-
-    SkDEBUGCODE(this->validate());
     return result;
 }
 
@@ -402,13 +396,10 @@ void GrDrawingManager::addOnFlushCallbackObject(GrOnFlushCallbackObject* onFlush
 }
 
 void GrDrawingManager::moveOpListsToDDL(SkDeferredDisplayList* ddl) {
-    SkDEBUGCODE(this->validate());
-
-    // no opList should receive a new command after this
     for (int i = 0; i < fOpLists.count(); ++i) {
+        // no opList should receive a new command after this
         fOpLists[i]->makeClosed(*fContext->contextPriv().caps());
     }
-    fActiveOpList = nullptr;
 
     SkASSERT(ddl->fOpLists.empty());
     ddl->fOpLists.swap(fOpLists);
@@ -418,14 +409,10 @@ void GrDrawingManager::moveOpListsToDDL(SkDeferredDisplayList* ddl) {
             ddl->fPendingPaths = ccpr->detachPendingPaths();
         }
     }
-
-    SkDEBUGCODE(this->validate());
 }
 
 void GrDrawingManager::copyOpListsFromDDL(const SkDeferredDisplayList* ddl,
                                           GrRenderTargetProxy* newDest) {
-    SkDEBUGCODE(this->validate());
-
     // Here we jam the proxy that backs the current replay SkSurface into the LazyProxyData.
     // The lazy proxy that references it (in the copied opLists) will steal its GrTexture.
     ddl->fLazyProxyData->fReplayDest = newDest;
@@ -436,42 +423,18 @@ void GrDrawingManager::copyOpListsFromDDL(const SkDeferredDisplayList* ddl,
         ccpr->mergePendingPaths(ddl->fPendingPaths);
     }
     fOpLists.push_back_n(ddl->fOpLists.count(), ddl->fOpLists.begin());
-
-    SkDEBUGCODE(this->validate());
 }
 
-#ifdef SK_DEBUG
-void GrDrawingManager::validate() const {
-    if (fActiveOpList) {
-        SkASSERT(!fOpLists.empty());
-        SkASSERT(!fActiveOpList->isClosed());
-        SkASSERT(fActiveOpList == fOpLists.back().get());
-    }
-
-    for (int i = 0; i < fOpLists.count(); ++i) {
-        if (fActiveOpList != fOpLists[i].get()) {
-            SkASSERT(fOpLists[i].get()->isClosed());
-        }
-    }
-
-    if (!fOpLists.empty() && !fOpLists.back()->isClosed()) {
-        SkASSERT(fActiveOpList == fOpLists.back().get());
-    }
-}
-#endif
-
 sk_sp<GrRenderTargetOpList> GrDrawingManager::newRTOpList(GrRenderTargetProxy* rtp,
                                                           bool managedOpList) {
-    SkDEBUGCODE(this->validate());
     SkASSERT(fContext);
 
-    if (fActiveOpList) {
+    if (!fOpLists.empty()) {
         // This is a temporary fix for the partial-MDB world. In that world we're not
         // reordering so ops that (in the single opList world) would've just glommed onto the
         // end of the single opList but referred to a far earlier RT need to appear in their
         // own opList.
-        fActiveOpList->makeClosed(*fContext->contextPriv().caps());
-        fActiveOpList = nullptr;
+        fOpLists.back()->makeClosed(*fContext->contextPriv().caps());
     }
 
     auto resourceProvider = fContext->contextPriv().resourceProvider();
@@ -485,24 +448,20 @@ sk_sp<GrRenderTargetOpList> GrDrawingManager::newRTOpList(GrRenderTargetProxy* r
 
     if (managedOpList) {
         fOpLists.push_back() = opList;
-        fActiveOpList = opList.get();
     }
 
-    SkDEBUGCODE(this->validate());
     return opList;
 }
 
 sk_sp<GrTextureOpList> GrDrawingManager::newTextureOpList(GrTextureProxy* textureProxy) {
-    SkDEBUGCODE(this->validate());
     SkASSERT(fContext);
 
-    if (fActiveOpList) {
+    if (!fOpLists.empty()) {
         // This is a temporary fix for the partial-MDB world. In that world we're not
         // reordering so ops that (in the single opList world) would've just glommed onto the
         // end of the single opList but referred to a far earlier RT need to appear in their
         // own opList.
-        fActiveOpList->makeClosed(*fContext->contextPriv().caps());
-        fActiveOpList = nullptr;
+        fOpLists.back()->makeClosed(*fContext->contextPriv().caps());
     }
 
     sk_sp<GrTextureOpList> opList(new GrTextureOpList(fContext->contextPriv().resourceProvider(),
@@ -513,9 +472,7 @@ sk_sp<GrTextureOpList> GrDrawingManager::newTextureOpList(GrTextureProxy* textur
     SkASSERT(textureProxy->getLastOpList() == opList.get());
 
     fOpLists.push_back() = opList;
-    fActiveOpList = opList.get();
 
-    SkDEBUGCODE(this->validate());
     return opList;
 }
 
src/gpu/GrDrawingManager.h
@@ -102,8 +102,6 @@ private:
                                         int numSemaphores,
                                         GrBackendSemaphore backendSemaphores[]);
 
-    SkDEBUGCODE(void validate() const);
-
     friend class GrContext; // for access to: ctor, abandon, reset & flush
     friend class GrContextPriv; // access to: flush
     friend class GrOnFlushResourceProvider; // this is just a shallow wrapper around this class
@@ -120,7 +118,6 @@ private:
 
     bool fAbandoned;
     SkTArray<sk_sp<GrOpList>> fOpLists;
-    GrOpList* fActiveOpList = nullptr;
     // These are the IDs of the opLists currently being flushed (in internalFlush)
     SkSTArray<8, uint32_t, true> fFlushingOpListIDs;
     // These are the new opLists generated by the onFlush CBs