/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkTypes.h"
#include "Test.h"

#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrClip.h"
#include "GrDrawingManager.h"
#include "GrPathRenderer.h"
#include "GrPaint.h"
#include "GrRenderTargetContext.h"
#include "GrRenderTargetContextPriv.h"
#include "GrResourceProvider.h"
#include "GrShape.h"
#include "GrTexture.h"
#include "SkMatrix.h"
#include "SkPathPriv.h"
#include "SkRandom.h"
#include "SkRect.h"
#include "sk_tool_utils.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"
#include "mock/GrMockTypes.h"
#include <cmath>

static constexpr int kCanvasSize = 100;
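
// Test-only GrClip implementation: applies the given path as a clip by installing
// CCPR's clip processor as a coverage fragment processor on the draw.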
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    bool apply(GrContext* context, GrRenderTargetContext* rtc, bool, bool, GrAppliedClip* out,
               SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    rtc->width(), rtc->height(),
                                                    *context->contextPriv().caps()));
        return true;
    }
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->set(0, 0, width, height);
        if (iior) {
            *iior = false;
        }
    }

    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};
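
// Helper that owns a small GrRenderTargetContext and submits paths straight to CCPR
// via its testing-only entry point, bypassing the normal path renderer chain.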
class CCPRPathDrawer {
public:
    CCPRPathDrawer(GrContext* ctx, skiatest::Reporter* reporter)
            : fCtx(ctx)
            , fCCPR(fCtx->contextPriv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(fCtx->contextPriv().makeDeferredRenderTargetContext(
                           SkBackingFit::kExact, kCanvasSize,
                           kCanvasSize, kRGBA_8888_GrPixelConfig,
                           nullptr)) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx; }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(nullptr, 0, GrRenderTargetContext::CanClearFullscreen::kYes); }
    void abandonGrContext() { fCtx = nullptr; fCCPR = nullptr; fRTC = nullptr; }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(GrColor4f(0, 1, 0, 1));

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrShape shape(path);

        fCCPR->testingOnly_drawPathDirectly({
                fCtx, std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, GrColor4f color = GrColor4f(0, 1, 0, 1)) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    GrContext* fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    sk_sp<GrRenderTargetContext> fRTC;
};
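
// Base harness for the mock-context tests below. It configures GrMockOptions with the
// caps CCPR requires, restricts the context to the coverage counting path renderer,
// then hands a CCPRPathDrawer to the subclass's onRun().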
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter) {
        GrMockOptions mockOptions;
        mockOptions.fInstanceAttribSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        fMockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!fMockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!fMockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(fMockContext.get(), reporter);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    sk_sp<GrContext> fMockContext;
    SkPath fPath;
};
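
// Registers a CCPRTest subclass as a GPU unit test that runs against the mock context.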
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter); \
    }
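
// Verifies that CCPR drops its path refs: after each flush, and when the context is
// abandoned or deleted without flushing, the queued draw and clip paths must become
// unique again.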
class GrCCPRTest_cleanup : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        ccpr.abandonGrContext();
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        fMockContext.reset();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanup)
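
// Runs the same cleanup checks with the mock backend failing every texture allocation,
// so CCPR never obtains an atlas texture to render into.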
class GrCCPRTest_cleanupWithTexAllocFail : public GrCCPRTest_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanupWithTexAllocFail)
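
// Verifies that CCPR draw Ops unregister themselves when they get culled early (here,
// by a fullscreen clear) rather than being flushed.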
class GrCCPRTest_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.abandonGrContext();
        fMockContext.reset(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(GrCCPRTest_unregisterCulledOps)
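
// Feeds CCPR paths that parse down to nothing: a path cropped entirely away by the
// render target bounds and a genuinely empty path. Drawing, clipping, and flushing
// them must not trip internal asserts or crash.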
class GrCCPRTest_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr, however we use a back door for
        // testing so this path will make it.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(GrCCPRTest_parseEmptyPath)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class GrCCPRTest_cache : public CCPRTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        static constexpr int kPathSize = 20;
        SkRandom rand;

        SkPath paths[300];
        int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
        for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            paths[i] = sk_tool_utils::make_star(SkRect::MakeIWH(kPathSize, kPathSize), numPts,
                                                primes[rand.nextU() % SK_ARRAY_COUNT(primes)]);
        }

        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };

        int firstAtlasID = -1;

        for (int iterIdx = 0; iterIdx < 10; ++iterIdx) {
            static constexpr int kNumHitsBeforeStash = 2;
            static const GrUniqueKey gInvalidUniqueKey;

            // Draw all the paths then flush. Repeat until a new stash occurs.
            const GrUniqueKey* stashedAtlasKey = &gInvalidUniqueKey;
            for (int j = 0; j < kNumHitsBeforeStash; ++j) {
                // Nothing should be stashed until its hit count reaches kNumHitsBeforeStash.
                REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
                    ccpr.drawPath(paths[i], matrices[i % 2]);
                }
                ccpr.flush();

                stashedAtlasKey = &ccpr.ccpr()->testingOnly_getStashedAtlasKey();
            }

            // Figure out the mock backend ID of the atlas texture stashed away by CCPR.
            GrMockTextureInfo stashedAtlasInfo;
            stashedAtlasInfo.fID = -1;
            if (stashedAtlasKey->isValid()) {
                GrResourceProvider* rp = ccpr.ctx()->contextPriv().resourceProvider();
                sk_sp<GrSurface> stashedAtlas = rp->findByUniqueKey<GrSurface>(*stashedAtlasKey);
                REPORTER_ASSERT(reporter, stashedAtlas);
                if (stashedAtlas) {
                    const auto& backendTexture = stashedAtlas->asTexture()->getBackendTexture();
                    backendTexture.getMockTextureInfo(&stashedAtlasInfo);
                }
            }

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
                firstAtlasID = stashedAtlasInfo.fID;
                continue;
            }

            switch (iterIdx % 3) {
                case 1:
                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, no atlas should
                    // have been stashed away.
                    REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                    // Invalidate even path masks.
                    matrices[0].preTranslate(1.6f, 1.4f);
                    break;

                case 2:
                    // Even path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' should be kept as a scratch texture in the resource cache.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Invalidate odd path masks.
                    matrices[1].preTranslate(-1.4f, -1.6f);
                    break;

                case 0:
                    // Odd path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' is the same texture that got stashed away last time (assuming
                    // no assertion failures). So if it also got stashed this time, it means we
                    // first copied the even paths out of it, then recycled the exact same texture
                    // to render the odd paths. This is the expected behavior.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Integer translates: all path masks stay valid.
                    matrices[0].preTranslate(-1, -1);
                    matrices[1].preTranslate(1, 1);
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(GrCCPRTest_cache)
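
// Base harness for CCPR tests that render on real GPU contexts rather than the mock
// context. Silently skips itself when CCPR is not enabled on the GPU.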
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx) const {
        if (!ctx->contextPriv().drawingManager()->getCoverageCountingPathRenderer()) {
            return; // CCPR is not enabled on this GPU.
        }
        CCPRPathDrawer ccpr(ctx, reporter);
        if (!ccpr.valid()) {
            return;
        }
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};
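
// Registers a CCPRRenderingTest subclass to run on every rendering GPU context.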
#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext()); \
    }
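
// Draws one path with a very large number of verbs to exercise huge instance counts in
// a single CCPR flush.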
class GrCCPRTest_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 17;
        ccpr.clear();
        SkPath busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath);

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(GrCCPRTest_busyPath)