/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkCanvas.h"
#include "include/core/SkSpan.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrResourceAllocator.h"
#include "src/gpu/GrResourceProviderPriv.h"
#include "src/gpu/GrSurfaceProxyPriv.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/GrTextureProxy.h"
#include "tests/Test.h"
#include "tools/gpu/ManagedBackendTexture.h"

namespace {
struct ProxyParams {
    int             fSize;
    GrRenderable    fRenderable;
    GrColorType     fColorType;
    SkBackingFit    fFit;
    int             fSampleCnt;
    SkBudgeted      fBudgeted;
    enum Kind {
        kDeferred,
        kBackend,
        kFullyLazy,
        kLazy,
        kInstantiated
    };
    Kind            fKind;
    GrUniqueKey     fUniqueKey = GrUniqueKey();
    // TODO: do we care about mipmapping
};
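
// Shorthand aliases to keep the test-case tables below readable.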
constexpr GrRenderable kRT = GrRenderable::kYes;
constexpr GrRenderable kNotRT = GrRenderable::kNo;

constexpr GrColorType kRGBA = GrColorType::kRGBA_8888;
constexpr GrColorType kAlpha = GrColorType::kAlpha_8;

constexpr SkBackingFit kE = SkBackingFit::kExact;
constexpr SkBackingFit kA = SkBackingFit::kApprox;

constexpr SkBudgeted kNotB = SkBudgeted::kNo;
constexpr SkBudgeted kB = SkBudgeted::kYes;

constexpr ProxyParams::Kind kDeferred = ProxyParams::Kind::kDeferred;
constexpr ProxyParams::Kind kBackend = ProxyParams::Kind::kBackend;
constexpr ProxyParams::Kind kInstantiated = ProxyParams::Kind::kInstantiated;
constexpr ProxyParams::Kind kLazy = ProxyParams::Kind::kLazy;
constexpr ProxyParams::Kind kFullyLazy = ProxyParams::Kind::kFullyLazy;

}  // anonymous namespace
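
// Creates a deferred proxy: nothing is instantiated until the resource allocator
// assigns it a backing GrSurface.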
static sk_sp<GrSurfaceProxy> make_deferred(GrProxyProvider* proxyProvider, const GrCaps* caps,
                                           const ProxyParams& p) {
    const GrBackendFormat format = caps->getDefaultBackendFormat(p.fColorType, p.fRenderable);
    return proxyProvider->createProxy(format, {p.fSize, p.fSize}, p.fRenderable, p.fSampleCnt,
                                      GrMipmapped::kNo, p.fFit, p.fBudgeted, GrProtected::kNo);
}
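
// Wraps a newly created backend texture in a proxy. A wrapped proxy brings its own
// GrSurface, so the allocator never recycles it (see the "Wrapped backend textures"
// test case below).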
static sk_sp<GrSurfaceProxy> make_backend(GrDirectContext* dContext, const ProxyParams& p) {
    GrProxyProvider* proxyProvider = dContext->priv().proxyProvider();

    SkColorType skColorType = GrColorTypeToSkColorType(p.fColorType);
    SkASSERT(SkColorType::kUnknown_SkColorType != skColorType);

    auto mbet = sk_gpu_test::ManagedBackendTexture::MakeWithoutData(
            dContext, p.fSize, p.fSize, skColorType, GrMipmapped::kNo, GrRenderable::kNo);

    if (!mbet) {
        return nullptr;
    }

    return proxyProvider->wrapBackendTexture(mbet->texture(),
                                             kBorrow_GrWrapOwnership,
                                             GrWrapCacheable::kNo,
                                             kRead_GrIOType,
                                             mbet->refCountedCallback());
}
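
// Creates a fully-lazy proxy: its dimensions and texture are supplied by the callback
// when the proxy is instantiated.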
static sk_sp<GrSurfaceProxy> make_fully_lazy(GrProxyProvider* proxyProvider, const GrCaps* caps,
                                             const ProxyParams& p) {
    const GrBackendFormat format = caps->getDefaultBackendFormat(p.fColorType, p.fRenderable);
    auto cb = [p](GrResourceProvider* provider, const GrSurfaceProxy::LazySurfaceDesc& desc) {
        auto tex = provider->createTexture({p.fSize, p.fSize}, desc.fFormat,
                                           desc.fRenderable, desc.fSampleCnt,
                                           desc.fMipmapped, desc.fBudgeted,
                                           desc.fProtected);
        return GrSurfaceProxy::LazyCallbackResult(std::move(tex));
    };
    return GrProxyProvider::MakeFullyLazyProxy(std::move(cb), format, p.fRenderable, p.fSampleCnt,
                                               GrProtected::kNo, *caps,
                                               GrSurfaceProxy::UseAllocator::kYes);
}
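
// Creates a lazy proxy: the dimensions are known up front, but the texture itself is
// created by the callback at instantiation time.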
static sk_sp<GrSurfaceProxy> make_lazy(GrProxyProvider* proxyProvider, const GrCaps* caps,
                                       const ProxyParams& p) {
    const GrBackendFormat format = caps->getDefaultBackendFormat(p.fColorType, p.fRenderable);
    auto cb = [](GrResourceProvider* provider, const GrSurfaceProxy::LazySurfaceDesc& desc) {
        auto tex = provider->createTexture(desc.fDimensions, desc.fFormat,
                                           desc.fRenderable, desc.fSampleCnt,
                                           desc.fMipmapped, desc.fBudgeted,
                                           desc.fProtected);
        return GrSurfaceProxy::LazyCallbackResult(std::move(tex));
    };
    return proxyProvider->createLazyProxy(std::move(cb), format, {p.fSize, p.fSize},
                                          GrMipmapped::kNo, GrMipmapStatus::kNotAllocated,
                                          GrInternalSurfaceFlags::kNone,
                                          p.fFit, p.fBudgeted, GrProtected::kNo,
                                          GrSurfaceProxy::UseAllocator::kYes);
}
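
// Builds a proxy of the kind requested in ProxyParams. kInstantiated proxies are backed
// immediately so they can pre-populate the resource cache for the budget tests below.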
static sk_sp<GrSurfaceProxy> make_proxy(GrDirectContext* dContext, const ProxyParams& p) {
    GrProxyProvider* proxyProvider = dContext->priv().proxyProvider();
    const GrCaps* caps = dContext->priv().caps();
    sk_sp<GrSurfaceProxy> proxy;
    switch (p.fKind) {
        case ProxyParams::kDeferred:
            proxy = make_deferred(proxyProvider, caps, p);
            break;
        case ProxyParams::kBackend:
            proxy = make_backend(dContext, p);
            break;
        case ProxyParams::kFullyLazy:
            proxy = make_fully_lazy(proxyProvider, caps, p);
            break;
        case ProxyParams::kLazy:
            proxy = make_lazy(proxyProvider, caps, p);
            break;
        case ProxyParams::kInstantiated:
            proxy = make_deferred(proxyProvider, caps, p);
            if (proxy) {
                auto surf = proxy->priv().createSurface(dContext->priv().resourceProvider());
                proxy->priv().assign(std::move(surf));
            }
            break;
    }
    if (proxy && p.fUniqueKey.isValid()) {
        SkASSERT(proxy->asTextureProxy());
        proxyProvider->assignUniqueKeyToProxy(p.fUniqueKey, proxy->asTextureProxy());
    }
    return proxy;
}

// Basic test that two proxies with overlapping intervals and compatible descriptors are
// assigned different GrSurfaces.
static void overlap_test(skiatest::Reporter* reporter, GrDirectContext* dContext,
                         sk_sp<GrSurfaceProxy> p1, sk_sp<GrSurfaceProxy> p2,
                         bool expectedResult) {
    GrResourceAllocator alloc(dContext);

    alloc.addInterval(p1.get(), 0, 4, GrResourceAllocator::ActualUse::kYes);
    alloc.incOps();
    alloc.addInterval(p2.get(), 1, 2, GrResourceAllocator::ActualUse::kYes);
    alloc.incOps();

    REPORTER_ASSERT(reporter, alloc.planAssignment());
    REPORTER_ASSERT(reporter, alloc.makeBudgetHeadroom());
    REPORTER_ASSERT(reporter, alloc.assign());

    REPORTER_ASSERT(reporter, p1->peekSurface());
    REPORTER_ASSERT(reporter, p2->peekSurface());
    bool doTheBackingStoresMatch = p1->underlyingUniqueID() == p2->underlyingUniqueID();
    REPORTER_ASSERT(reporter, expectedResult == doTheBackingStoresMatch);
}

// Test various cases when two proxies do not have overlapping intervals.
// This mainly acts as a test of the ResourceAllocator's free pool.
static void non_overlap_test(skiatest::Reporter* reporter, GrDirectContext* dContext,
                             sk_sp<GrSurfaceProxy> p1, sk_sp<GrSurfaceProxy> p2,
                             bool expectedResult) {
    GrResourceAllocator alloc(dContext);

    alloc.incOps();
    alloc.incOps();
    alloc.incOps();
    alloc.incOps();
    alloc.incOps();
    alloc.incOps();

    alloc.addInterval(p1.get(), 0, 2, GrResourceAllocator::ActualUse::kYes);
    alloc.addInterval(p2.get(), 3, 5, GrResourceAllocator::ActualUse::kYes);

    REPORTER_ASSERT(reporter, alloc.planAssignment());
    REPORTER_ASSERT(reporter, alloc.makeBudgetHeadroom());
    REPORTER_ASSERT(reporter, alloc.assign());

    REPORTER_ASSERT(reporter, p1->peekSurface());
    REPORTER_ASSERT(reporter, p2->peekSurface());
    bool doTheBackingStoresMatch = p1->underlyingUniqueID() == p2->underlyingUniqueID();
    REPORTER_ASSERT(reporter, expectedResult == doTheBackingStoresMatch);
}

DEF_GPUTEST_FOR_RENDERING_CONTEXTS(ResourceAllocatorTest, reporter, ctxInfo) {
    auto dContext = ctxInfo.directContext();
    const GrCaps* caps = dContext->priv().caps();

    struct TestCase {
        ProxyParams fP1;
        ProxyParams fP2;
        bool        fExpectation;
    };

    constexpr bool kShare = true;
    constexpr bool kDontShare = false;

    // Non-RT GrSurfaces are never recycled on some platforms.
    bool kConditionallyShare = caps->reuseScratchTextures();

    static const TestCase overlappingTests[] = {
        // Two proxies with overlapping intervals and compatible descriptors should never share
        // RT version
        {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         kDontShare},
        // non-RT version
        {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         kDontShare},
    };

    for (size_t i = 0; i < SK_ARRAY_COUNT(overlappingTests); i++) {
        const TestCase& test = overlappingTests[i];
        sk_sp<GrSurfaceProxy> p1 = make_proxy(dContext, test.fP1);
        sk_sp<GrSurfaceProxy> p2 = make_proxy(dContext, test.fP2);
        reporter->push(SkStringPrintf("case %d", SkToInt(i)));
        overlap_test(reporter, dContext, std::move(p1), std::move(p2), test.fExpectation);
        reporter->pop();
    }
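
    // The backend may round up the requested sample count, so k2 and k4 can end up equal;
    // the MSAA case below accounts for that.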
    auto beFormat = caps->getDefaultBackendFormat(GrColorType::kRGBA_8888, GrRenderable::kYes);
    int k2 = caps->getRenderTargetSampleCount(2, beFormat);
    int k4 = caps->getRenderTargetSampleCount(4, beFormat);

    static const TestCase nonOverlappingTests[] = {
        // Two non-overlapping intervals w/ compatible proxies should share
        // both same size & approx
        {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         kShare},
        {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         kConditionallyShare},
        // different sizes but still approx
        {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         {50, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         kShare},
        {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         {50, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         kConditionallyShare},
        // same sizes but exact
        {{64, kRT, kRGBA, kE, 1, kNotB, kDeferred},
         {64, kRT, kRGBA, kE, 1, kNotB, kDeferred},
         kShare},
        {{64, kNotRT, kRGBA, kE, 1, kNotB, kDeferred},
         {64, kNotRT, kRGBA, kE, 1, kNotB, kDeferred},
         kConditionallyShare},
        // Two non-overlapping intervals w/ different exact sizes should not share
        {{56, kRT, kRGBA, kE, 1, kNotB, kDeferred},
         {54, kRT, kRGBA, kE, 1, kNotB, kDeferred},
         kDontShare},
        // Two non-overlapping intervals w/ _very different_ approx sizes should not share
        {{255, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         {127, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         kDontShare},
        // Two non-overlapping intervals w/ different MSAA sample counts should not share
        {{64, kRT, kRGBA, kA, k2, kNotB, kDeferred},
         {64, kRT, kRGBA, kA, k4, kNotB, kDeferred},
         k2 == k4},
        // Two non-overlapping intervals w/ different configs should not share
        {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kRT, kAlpha, kA, 1, kNotB, kDeferred},
         kDontShare},
        // Two non-overlapping intervals w/ different RT classifications should never share
        {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         kDontShare},
        {{64, kNotRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         kDontShare},
        // Two non-overlapping intervals w/ different origins should share
        {{64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         {64, kRT, kRGBA, kA, 1, kNotB, kDeferred},
         kShare},
        // Wrapped backend textures should never be reused
        {{64, kNotRT, kRGBA, kE, 1, kNotB, kBackend},
         {64, kNotRT, kRGBA, kE, 1, kNotB, kDeferred},
         kDontShare}
    };

    for (size_t i = 0; i < SK_ARRAY_COUNT(nonOverlappingTests); i++) {
        const TestCase& test = nonOverlappingTests[i];
        sk_sp<GrSurfaceProxy> p1 = make_proxy(dContext, test.fP1);
        sk_sp<GrSurfaceProxy> p2 = make_proxy(dContext, test.fP2);

        if (!p1 || !p2) {
            continue; // creation can fail (e.g., for msaa4 on iOS)
        }

        reporter->push(SkStringPrintf("case %d", SkToInt(i)));
        non_overlap_test(reporter, dContext, std::move(p1), std::move(p2),
                         test.fExpectation);
        reporter->pop();
    }
}
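
// Renders a minimal frame; the stress test below calls this repeatedly while the cache
// budget is forced to zero.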
static void draw(GrRecordingContext* rContext) {
    SkImageInfo ii = SkImageInfo::Make(1024, 1024, kRGBA_8888_SkColorType, kPremul_SkAlphaType);

    sk_sp<SkSurface> s = SkSurface::MakeRenderTarget(rContext, SkBudgeted::kYes,
                                                     ii, 1, kTopLeft_GrSurfaceOrigin, nullptr);

    SkCanvas* c = s->getCanvas();

    c->clear(SK_ColorBLACK);
}

DEF_GPUTEST_FOR_RENDERING_CONTEXTS(ResourceAllocatorStressTest, reporter, ctxInfo) {
    auto context = ctxInfo.directContext();

    size_t maxBytes = context->getResourceCacheLimit();

    context->setResourceCacheLimit(0); // We'll always be overbudget

    draw(context);
    draw(context);
    draw(context);
    draw(context);
    context->flushAndSubmit();

    context->setResourceCacheLimit(maxBytes);
}
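
// Descriptions for the memory-budget cases: an Interval pairs a proxy with the range of
// ops over which it is used, and a TestCase adds the cache contents and budget to test.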
struct Interval {
    ProxyParams           fParams;
    int                   fStart;
    int                   fEnd;
    sk_sp<GrSurfaceProxy> fProxy = nullptr;
};

struct TestCase {
    const char*           fName;
    bool                  fShouldFit;
    size_t                fBudget;
    SkTArray<ProxyParams> fPurgeableResourcesInCache = {};
    SkTArray<ProxyParams> fUnpurgeableResourcesInCache = {};
    SkTArray<Interval>    fIntervals;
};

static void memory_budget_test(skiatest::Reporter* reporter,
                               GrDirectContext* dContext,
                               const TestCase& test) {
    // Reset cache.
    auto cache = dContext->priv().getResourceCache();
    cache->releaseAll();
    cache->setLimit(test.fBudget);

    // Add purgeable entries.
    size_t expectedPurgeableBytes = 0;
    SkTArray<sk_sp<GrSurface>> purgeableSurfaces;
    for (auto& params : test.fPurgeableResourcesInCache) {
        SkASSERT(params.fKind == kInstantiated);
        sk_sp<GrSurfaceProxy> proxy = make_proxy(dContext, params);
        REPORTER_ASSERT(reporter, proxy->peekSurface());
        expectedPurgeableBytes += proxy->gpuMemorySize();
        purgeableSurfaces.push_back(sk_ref_sp(proxy->peekSurface()));
    }
    purgeableSurfaces.reset();
    REPORTER_ASSERT(reporter, expectedPurgeableBytes == cache->getPurgeableBytes(),
                    "%zu", cache->getPurgeableBytes());

    // Add unpurgeable entries.
    size_t expectedUnpurgeableBytes = 0;
    SkTArray<sk_sp<GrSurface>> unpurgeableSurfaces;
    for (auto& params : test.fUnpurgeableResourcesInCache) {
        SkASSERT(params.fKind == kInstantiated);
        sk_sp<GrSurfaceProxy> proxy = make_proxy(dContext, params);
        REPORTER_ASSERT(reporter, proxy->peekSurface());
        expectedUnpurgeableBytes += proxy->gpuMemorySize();
        unpurgeableSurfaces.push_back(sk_ref_sp(proxy->peekSurface()));
    }

    auto unpurgeableBytes = cache->getBudgetedResourceBytes() - cache->getPurgeableBytes();
    REPORTER_ASSERT(reporter, expectedUnpurgeableBytes == unpurgeableBytes,
                    "%zu", unpurgeableBytes);

    // Add intervals and test.
    GrResourceAllocator alloc(dContext);
    for (auto& interval : test.fIntervals) {
        for (int i = interval.fStart; i <= interval.fEnd; i++) {
            alloc.incOps();
        }
        alloc.addInterval(interval.fProxy.get(), interval.fStart, interval.fEnd,
                          GrResourceAllocator::ActualUse::kYes);
    }
    REPORTER_ASSERT(reporter, alloc.planAssignment());
    REPORTER_ASSERT(reporter, alloc.makeBudgetHeadroom() == test.fShouldFit);
}
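
// Checks that makeBudgetHeadroom() correctly predicts whether the planned assignments fit
// within the cache budget for a variety of purgeable/unpurgeable cache contents.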
DEF_GPUTEST_FOR_RENDERING_CONTEXTS(ResourceAllocatorMemoryBudgetTest, reporter, ctxInfo) {
    auto dContext = ctxInfo.directContext();

    constexpr bool kUnder = true;
    constexpr bool kOver = false;
    constexpr size_t kRGBA64Bytes = 4 * 64 * 64;
    const ProxyParams kProxy64 = {64, kRT, kRGBA, kE, 1, kB, kDeferred};
    const ProxyParams kProxy64NotBudgeted = {64, kRT, kRGBA, kE, 1, kNotB, kDeferred};
    const ProxyParams kProxy64Lazy = {64, kRT, kRGBA, kE, 1, kB, kLazy};
    const ProxyParams kProxy64FullyLazy = {64, kRT, kRGBA, kE, 1, kB, kFullyLazy};
    const ProxyParams kProxy32Instantiated = {32, kRT, kRGBA, kE, 1, kB, kInstantiated};
    const ProxyParams kProxy64Instantiated = {64, kRT, kRGBA, kE, 1, kB, kInstantiated};

    TestCase tests[] = {
        {"empty DAG", kUnder, 0, {}, {}, {}},
        {"unbudgeted", kUnder, 0, {}, {}, {{kProxy64NotBudgeted, 0, 2}}},
        {"basic", kUnder, kRGBA64Bytes, {}, {}, {{kProxy64, 0, 2}}},
        {"basic, over", kOver, kRGBA64Bytes - 1, {}, {}, {{kProxy64, 0, 2}}},
        {"shared", kUnder, kRGBA64Bytes, {}, {},
         {
             {kProxy64, 0, 2},
             {kProxy64, 3, 5},
         }},
        {"retrieved from cache", kUnder, kRGBA64Bytes,
         /* purgeable */{kProxy64Instantiated},
         /* unpurgeable */{},
         {
             {kProxy64, 0, 2}
         }},
        {"purge 4", kUnder, kRGBA64Bytes,
         /* purgeable */{
             kProxy32Instantiated,
             kProxy32Instantiated,
             kProxy32Instantiated,
             kProxy32Instantiated
         },
         /* unpurgeable */{},
         {
             {kProxy64, 0, 2}
         }},
        {"dont purge what we've reserved", kOver, kRGBA64Bytes,
         /* purgeable */{kProxy64Instantiated},
         /* unpurgeable */{},
         {
             {kProxy64, 0, 2},
             {kProxy64, 1, 3}
         }},
        {"unpurgeable", kOver, kRGBA64Bytes,
         /* purgeable */{},
         /* unpurgeable */{kProxy64Instantiated},
         {
             {kProxy64, 0, 2}
         }},
        {"lazy", kUnder, kRGBA64Bytes,
         /* purgeable */{},
         /* unpurgeable */{},
         {
             {kProxy64Lazy, 0, 2}
         }},
        {"lazy, over", kOver, kRGBA64Bytes - 1,
         /* purgeable */{},
         /* unpurgeable */{},
         {
             {kProxy64Lazy, 0, 2}
         }},
        {"fully-lazy", kUnder, kRGBA64Bytes,
         /* purgeable */{},
         /* unpurgeable */{},
         {
             {kProxy64FullyLazy, 0, 2}
         }},
        {"fully-lazy, over", kOver, kRGBA64Bytes - 1,
         /* purgeable */{},
         /* unpurgeable */{},
         {
             {kProxy64FullyLazy, 0, 2}
         }},
    };

    SkString match("");
    for (size_t i = 0; i < SK_ARRAY_COUNT(tests); i++) {
        TestCase& test = tests[i];
        if (match.isEmpty() || match == SkString(test.fName)) {
            // Create proxies
            for (Interval& interval : test.fIntervals) {
                interval.fProxy = make_proxy(dContext, interval.fParams);
            }
            reporter->push(SkString(test.fName));
            memory_budget_test(reporter, dContext, test);
            reporter->pop();
        }
    }
}