/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "tools/DDLPromiseImageHelper.h"

#include "include/core/SkDeferredDisplayListRecorder.h"
#include "include/core/SkPicture.h"
#include "include/core/SkSerialProcs.h"
#include "include/core/SkYUVAIndex.h"
#include "include/core/SkYUVASizeInfo.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/GrYUVABackendTextures.h"
#include "src/codec/SkCodecImageGenerator.h"
#include "src/core/SkCachedData.h"
#include "src/core/SkMipmap.h"
#include "src/core/SkTaskGroup.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/image/SkImage_Base.h"
#include "src/image/SkImage_GpuYUVA.h"

DDLPromiseImageHelper::PromiseImageInfo::PromiseImageInfo(int index,
                                                          uint32_t originalUniqueID,
                                                          const SkImageInfo& ii)
        : fIndex(index)
        , fOriginalUniqueID(originalUniqueID)
        , fImageInfo(ii) {
}

DDLPromiseImageHelper::PromiseImageInfo::PromiseImageInfo(PromiseImageInfo&& other)
        : fIndex(other.fIndex)
        , fOriginalUniqueID(other.fOriginalUniqueID)
        , fImageInfo(other.fImageInfo)
        , fBaseLevel(other.fBaseLevel)
        , fMipLevels(std::move(other.fMipLevels))
        , fYUVAPixmaps(std::move(other.fYUVAPixmaps)) {
    for (int i = 0; i < SkYUVASizeInfo::kMaxCount; ++i) {
        fCallbackContexts[i] = std::move(other.fCallbackContexts[i]);
    }
}

DDLPromiseImageHelper::PromiseImageInfo::~PromiseImageInfo() {}

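// Flatten the base level and any stored mipmap levels into one contiguous array of
// SkPixmaps, suitable for passing to createBackendTexture. Only used for non-YUV images.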
std::unique_ptr<SkPixmap[]> DDLPromiseImageHelper::PromiseImageInfo::normalMipLevels() const {
    SkASSERT(!this->isYUV());
    std::unique_ptr<SkPixmap[]> pixmaps(new SkPixmap[this->numMipLevels()]);
    pixmaps[0] = fBaseLevel.pixmap();
    if (fMipLevels) {
        for (int i = 0; i < fMipLevels->countLevels(); ++i) {
            SkMipmap::Level mipLevel;
            fMipLevels->getLevel(i, &mipLevel);
            pixmaps[i+1] = mipLevel.fPixmap;
        }
    }
    return pixmaps;
}

int DDLPromiseImageHelper::PromiseImageInfo::numMipLevels() const {
    SkASSERT(!this->isYUV());
    return fMipLevels ? fMipLevels->countLevels()+1 : 1;
}

void DDLPromiseImageHelper::PromiseImageInfo::setMipLevels(const SkBitmap& baseLevel,
                                                           std::unique_ptr<SkMipmap> mipLevels) {
    fBaseLevel = baseLevel;
    fMipLevels = std::move(mipLevels);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
PromiseImageCallbackContext::~PromiseImageCallbackContext() {
    // See comment in release() about YUVA image creation failures.
    // SkASSERT(fDoneCnt == fNumImages);
    SkASSERT(!fTotalFulfills || fDoneCnt);

    if (fPromiseImageTexture) {
        fContext->deleteBackendTexture(fPromiseImageTexture->backendTexture());
    }
}

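// Wrap the newly created backend texture in an SkPromiseImageTexture so the promise
// image fulfill/release callbacks can hand it back out.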
void PromiseImageCallbackContext::setBackendTexture(const GrBackendTexture& backendTexture) {
    SkASSERT(!fPromiseImageTexture);
    SkASSERT(fBackendFormat == backendTexture.getBackendFormat());
    fPromiseImageTexture = SkPromiseImageTexture::Make(backendTexture);
}

void PromiseImageCallbackContext::destroyBackendTexture() {
    SkASSERT(!fPromiseImageTexture || fPromiseImageTexture->unique());

    if (fPromiseImageTexture) {
        fContext->deleteBackendTexture(fPromiseImageTexture->backendTexture());
    }
    fPromiseImageTexture = nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

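// Serialize the picture, replacing each SkImage with the (4 byte) index it will occupy
// in this helper's fImageInfo array.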
sk_sp<SkData> DDLPromiseImageHelper::deflateSKP(const SkPicture* inputPicture) {
    SkSerialProcs procs;

    procs.fImageCtx = this;
    procs.fImageProc = [](SkImage* image, void* ctx) -> sk_sp<SkData> {
        auto helper = static_cast<DDLPromiseImageHelper*>(ctx);

        int id = helper->findOrDefineImage(image);

        // Even if 'id' is invalid (i.e., -1) write it to the SKP
        return SkData::MakeWithCopy(&id, sizeof(id));
    };

    return inputPicture->serialize(&procs);
}

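// Create and upload the backend texture for a single YUVA plane, blocking until the
// asynchronous creation/upload has completed.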
static GrBackendTexture create_yuva_texture(GrDirectContext* direct,
                                            const SkPixmap& pm,
                                            int texIndex) {
    SkASSERT(texIndex >= 0 && texIndex <= 3);

    bool finishedBECreate = false;
    auto markFinished = [](void* context) {
        *(bool*)context = true;
    };
    auto beTex = direct->createBackendTexture(pm,
                                              kTopLeft_GrSurfaceOrigin,
                                              GrRenderable::kNo,
                                              GrProtected::kNo,
                                              markFinished,
                                              &finishedBECreate);
    if (beTex.isValid()) {
        direct->submit();
        while (!finishedBECreate) {
            direct->checkAsyncWorkCompletion();
        }
    }
    return beTex;
}

/*
 * Create backend textures and upload data to them for all the textures required to satisfy
 * a single promise image.
 * For YUV textures this will result in up to 4 actual textures.
 */
void DDLPromiseImageHelper::CreateBETexturesForPromiseImage(GrDirectContext* direct,
                                                            PromiseImageInfo* info) {
    if (info->isYUV()) {
        int numPixmaps = info->yuvaInfo().numPlanes();
        for (int j = 0; j < numPixmaps; ++j) {
            const SkPixmap& yuvPixmap = info->yuvPixmap(j);

            PromiseImageCallbackContext* callbackContext = info->callbackContext(j);
            SkASSERT(callbackContext);

            // DDL TODO: what should we do with mipmapped YUV images
            callbackContext->setBackendTexture(create_yuva_texture(direct, yuvPixmap, j));
            SkASSERT(callbackContext->promiseImageTexture());
        }
    } else {
        PromiseImageCallbackContext* callbackContext = info->callbackContext(0);
        if (!callbackContext) {
            // This texture would've been too large to fit on the GPU
            return;
        }

        std::unique_ptr<SkPixmap[]> mipLevels = info->normalMipLevels();

        bool finishedBECreate = false;
        auto markFinished = [](void* context) {
            *(bool*)context = true;
        };
        auto backendTex = direct->createBackendTexture(mipLevels.get(),
                                                       info->numMipLevels(),
                                                       kTopLeft_GrSurfaceOrigin,
                                                       GrRenderable::kNo,
                                                       GrProtected::kNo,
                                                       markFinished,
                                                       &finishedBECreate);
        SkASSERT(backendTex.isValid());
        direct->submit();
        while (!finishedBECreate) {
            direct->checkAsyncWorkCompletion();
        }

        callbackContext->setBackendTexture(backendTex);
    }
}

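// Delete the backend texture(s) backing a single promise image (every plane for a YUV
// image, or the single texture otherwise).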
void DDLPromiseImageHelper::DeleteBETexturesForPromiseImage(PromiseImageInfo* info) {
    if (info->isYUV()) {
        int numPixmaps = info->yuvaInfo().numPlanes();
        for (int j = 0; j < numPixmaps; ++j) {
            PromiseImageCallbackContext* callbackContext = info->callbackContext(j);
            SkASSERT(callbackContext);

            callbackContext->destroyBackendTexture();
            SkASSERT(!callbackContext->promiseImageTexture());
        }
    } else {
        PromiseImageCallbackContext* callbackContext = info->callbackContext(0);
        if (!callbackContext) {
            // This texture would've been too large to fit on the GPU
            return;
        }

        callbackContext->destroyBackendTexture();
        SkASSERT(!callbackContext->promiseImageTexture());
    }
}

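// Create a callback context for every texture that will be needed. Images that are too
// large for the GPU, or whose format isn't texturable, are left without a context and
// will fall back to bitmap-backed images in CreatePromiseImages.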
void DDLPromiseImageHelper::createCallbackContexts(GrDirectContext* direct) {
    const GrCaps* caps = direct->priv().caps();
    const int maxDimension = caps->maxTextureSize();

    for (int i = 0; i < fImageInfo.count(); ++i) {
        PromiseImageInfo& info = fImageInfo[i];

        if (info.isYUV()) {
            int numPixmaps = info.yuvaInfo().numPlanes();

            for (int j = 0; j < numPixmaps; ++j) {
                const SkPixmap& yuvPixmap = info.yuvPixmap(j);

                GrBackendFormat backendFormat = direct->defaultBackendFormat(yuvPixmap.colorType(),
                                                                             GrRenderable::kNo);

                sk_sp<PromiseImageCallbackContext> callbackContext(
                        new PromiseImageCallbackContext(direct, backendFormat));

                info.setCallbackContext(j, std::move(callbackContext));
            }
        } else {
            const SkBitmap& baseLevel = info.baseLevel();

            // TODO: explicitly mark the PromiseImageInfo as too big and check in uploadAllToGPU
            if (maxDimension < std::max(baseLevel.width(), baseLevel.height())) {
                // This won't fit on the GPU. Fall back to a raster-backed image per tile.
                continue;
            }

            GrBackendFormat backendFormat = direct->defaultBackendFormat(baseLevel.colorType(),
                                                                         GrRenderable::kNo);
            if (!caps->isFormatTexturable(backendFormat)) {
                continue;
            }

            sk_sp<PromiseImageCallbackContext> callbackContext(
                    new PromiseImageCallbackContext(direct, backendFormat));

            info.setCallbackContext(0, std::move(callbackContext));
        }
    }
}

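// Create the backend textures for all the promise images, either in parallel via the
// task group or serially on the current thread.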
void DDLPromiseImageHelper::uploadAllToGPU(SkTaskGroup* taskGroup, GrDirectContext* direct) {
    if (taskGroup) {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            PromiseImageInfo* info = &fImageInfo[i];

            taskGroup->add([direct, info]() { CreateBETexturesForPromiseImage(direct, info); });
        }
    } else {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            CreateBETexturesForPromiseImage(direct, &fImageInfo[i]);
        }
    }
}

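// Delete all the backend textures created in uploadAllToGPU, again either in parallel
// or serially.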
void DDLPromiseImageHelper::deleteAllFromGPU(SkTaskGroup* taskGroup, GrDirectContext* direct) {
    if (taskGroup) {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            PromiseImageInfo* info = &fImageInfo[i];

            taskGroup->add([info]() { DeleteBETexturesForPromiseImage(info); });
        }
    } else {
        for (int i = 0; i < fImageInfo.count(); ++i) {
            DeleteBETexturesForPromiseImage(&fImageInfo[i]);
        }
    }
}

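// Deserialize the compressed picture, replacing each stored image index with a newly
// created promise image (via CreatePromiseImages).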
sk_sp<SkPicture> DDLPromiseImageHelper::reinflateSKP(
                                                   SkDeferredDisplayListRecorder* recorder,
                                                   SkData* compressedPictureData,
                                                   SkTArray<sk_sp<SkImage>>* promiseImages) const {
    PerRecorderContext perRecorderContext { recorder, this, promiseImages };

    SkDeserialProcs procs;
    procs.fImageCtx = (void*) &perRecorderContext;
    procs.fImageProc = CreatePromiseImages;

    return SkPicture::MakeFromData(compressedPictureData, &procs);
}

// This generates promise images to replace the indices in the compressed picture. This
// reconstitution is performed separately in each thread so we end up with multiple
// promise images referring to the same GrBackendTexture.
sk_sp<SkImage> DDLPromiseImageHelper::CreatePromiseImages(const void* rawData,
                                                          size_t length, void* ctxIn) {
    PerRecorderContext* perRecorderContext = static_cast<PerRecorderContext*>(ctxIn);
    const DDLPromiseImageHelper* helper = perRecorderContext->fHelper;
    SkDeferredDisplayListRecorder* recorder = perRecorderContext->fRecorder;

    SkASSERT(length == sizeof(int));

    const int* indexPtr = static_cast<const int*>(rawData);
    if (!helper->isValidID(*indexPtr)) {
        return nullptr;
    }

    const DDLPromiseImageHelper::PromiseImageInfo& curImage = helper->getInfo(*indexPtr);

    // If there is no callback context that means 'createCallbackContexts' determined the
    // texture wouldn't fit on the GPU. Create a separate bitmap-backed image for each thread.
    if (!curImage.isYUV() && !curImage.callbackContext(0)) {
        SkASSERT(curImage.baseLevel().isImmutable());
        return curImage.baseLevel().asImage();
    }

    SkASSERT(curImage.index() == *indexPtr);

    sk_sp<SkImage> image;
    if (curImage.isYUV()) {
        GrBackendFormat backendFormats[SkYUVASizeInfo::kMaxCount];
        const SkYUVAInfo& yuvaInfo = curImage.yuvaInfo();
        void* contexts[SkYUVASizeInfo::kMaxCount] = { nullptr, nullptr, nullptr, nullptr };
        int textureCount = yuvaInfo.numPlanes();
        for (int i = 0; i < textureCount; ++i) {
            backendFormats[i] = curImage.backendFormat(i);
            contexts[i] = curImage.refCallbackContext(i).release();
        }
        GrYUVABackendTextureInfo yuvaBackendTextures(yuvaInfo,
                                                     backendFormats,
                                                     GrMipmapped::kNo,
                                                     kTopLeft_GrSurfaceOrigin);

        image = recorder->makeYUVAPromiseTexture(
                yuvaBackendTextures,
                curImage.refOverallColorSpace(),
                PromiseImageCallbackContext::PromiseImageFulfillProc,
                PromiseImageCallbackContext::PromiseImageReleaseProc,
                contexts);
        if (!image) {
            return nullptr;
        }
        for (int i = 0; i < textureCount; ++i) {
            curImage.callbackContext(i)->wasAddedToImage();
        }

#ifdef SK_DEBUG
        {
            // By the peekProxy contract this image should not have a single backing proxy so
            // should return null. The call should also not trigger the conversion to RGBA.
            SkImage_GpuYUVA* yuva = reinterpret_cast<SkImage_GpuYUVA*>(image.get());
            SkASSERT(!yuva->peekProxy());
            SkASSERT(!yuva->peekProxy());  // the first call didn't force a conversion to RGBA
        }
#endif
    } else {
        const GrBackendFormat& backendFormat = curImage.backendFormat(0);
        SkASSERT(backendFormat.isValid());

        // Each DDL recorder gets its own ref on the promise callback context for the
        // promise images it creates.
        image = recorder->makePromiseTexture(backendFormat,
                                             curImage.overallWidth(),
                                             curImage.overallHeight(),
                                             curImage.mipMapped(0),
                                             GrSurfaceOrigin::kTopLeft_GrSurfaceOrigin,
                                             curImage.overallColorType(),
                                             curImage.overallAlphaType(),
                                             curImage.refOverallColorSpace(),
                                             PromiseImageCallbackContext::PromiseImageFulfillProc,
                                             PromiseImageCallbackContext::PromiseImageReleaseProc,
                                             (void*)curImage.refCallbackContext(0).release());
        curImage.callbackContext(0)->wasAddedToImage();
    }
    perRecorderContext->fPromiseImages->push_back(image);
    SkASSERT(image);
    return image;
}

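// Return the index of the PromiseImageInfo that was created from 'image', or -1 if this
// image hasn't been seen before.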
int DDLPromiseImageHelper::findImage(SkImage* image) const {
    for (int i = 0; i < fImageInfo.count(); ++i) {
        if (fImageInfo[i].originalUniqueID() == image->uniqueID()) { // trying to dedup here
            SkASSERT(fImageInfo[i].index() == i);
            SkASSERT(this->isValidID(i) && this->isValidID(fImageInfo[i].index()));
            return i;
        }
    }
    return -1;
}

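// Add a new PromiseImageInfo for 'image', capturing its planes as YUVA pixmaps when the
// codec supports that and otherwise as an (optionally mipmapped) raster bitmap.
// Returns the new index, or -1 on failure.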
int DDLPromiseImageHelper::addImage(SkImage* image) {
    SkImage_Base* ib = as_IB(image);

    SkImageInfo overallII = SkImageInfo::Make(image->width(), image->height(),
                                              image->colorType() == kBGRA_8888_SkColorType
                                                        ? kRGBA_8888_SkColorType
                                                        : image->colorType(),
                                              image->alphaType(),
                                              image->refColorSpace());

    PromiseImageInfo& newImageInfo = fImageInfo.emplace_back(fImageInfo.count(),
                                                             image->uniqueID(),
                                                             overallII);

    auto codec = SkCodecImageGenerator::MakeFromEncodedCodec(ib->refEncodedData());
    SkYUVAPixmapInfo yuvaInfo;
    if (codec && codec->queryYUVAInfo(fSupportedYUVADataTypes, &yuvaInfo)) {
        auto yuvaPixmaps = SkYUVAPixmaps::Allocate(yuvaInfo);
        SkAssertResult(codec->getYUVAPlanes(yuvaPixmaps));
        SkASSERT(yuvaPixmaps.isValid());
        newImageInfo.setYUVPlanes(std::move(yuvaPixmaps));
    } else {
        sk_sp<SkImage> rasterImage = image->makeRasterImage(); // force decoding of lazy images
        if (!rasterImage) {
            return -1;
        }

        SkBitmap tmp;
        tmp.allocPixels(overallII);

        if (!rasterImage->readPixels(nullptr, tmp.pixmap(), 0, 0)) {
            return -1;
        }

        tmp.setImmutable();

        // Given how the DDL testing harness works (i.e., only modifying the SkImages w/in an
        // SKP) we don't know if a given SkImage will require mipmapping. To work around this
        // we just try to create all the backend textures as mipmapped but, failing that, fall
        // back to un-mipped.
        std::unique_ptr<SkMipmap> mipmaps(SkMipmap::Build(tmp.pixmap(), nullptr));

        newImageInfo.setMipLevels(tmp, std::move(mipmaps));
    }
    // In either case newImageInfo's PromiseImageCallbackContext is filled in by uploadAllToGPU

    return fImageInfo.count()-1;
}

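// Return the existing index for 'image' or, if it hasn't been seen yet, add it.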
int DDLPromiseImageHelper::findOrDefineImage(SkImage* image) {
    int preExistingID = this->findImage(image);
    if (preExistingID >= 0) {
        SkASSERT(this->isValidID(preExistingID));
        return preExistingID;
    }

    int newID = this->addImage(image);
    return newID;
}