/*
 * Copyright 2011 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
|
2013-04-11 16:54:09 +00:00
|
|
|
|
2019-03-20 15:50:33 +00:00
|
|
|
#include <atomic>
|
2019-04-23 17:05:21 +00:00
|
|
|
#include "include/core/SkGraphics.h"
|
|
|
|
#include "include/core/SkTime.h"
|
|
|
|
#include "include/private/SkTArray.h"
|
|
|
|
#include "include/private/SkTemplates.h"
|
|
|
|
#include "src/core/SkOSFile.h"
|
|
|
|
#include "src/core/SkTaskGroup.h"
|
|
|
|
#include "src/pathops/SkPathOpsDebug.h"
|
|
|
|
#include "tests/PathOpsDebug.h"
|
|
|
|
#include "tests/Test.h"
|
|
|
|
#include "tools/CrashHandler.h"
|
|
|
|
#include "tools/OverwriteLine.h"
|
|
|
|
#include "tools/Resources.h"
|
|
|
|
#include "tools/flags/CommandLineFlags.h"
|
|
|
|
#include "tools/gpu/GrContextFactory.h"
|
2012-09-07 18:24:43 +00:00
|
|
|
|
2009-02-27 16:24:51 +00:00
|
|
|
using namespace skiatest;
|
2016-03-31 01:56:19 +00:00
|
|
|
using namespace sk_gpu_test;
|
2009-02-27 16:24:51 +00:00
|
|
|
|
2019-03-21 16:31:36 +00:00
|
|
|
static DEFINE_bool2(dumpOp, d, false, "dump the pathOps to a file to recover mid-crash.");
|
|
|
|
static DEFINE_bool2(extendedTest, x, false, "run extended tests for pathOps.");
|
|
|
|
static DEFINE_bool2(runFail, f, false, "check for success on tests known to fail.");
|
|
|
|
static DEFINE_bool2(verifyOp, y, false, "compare the pathOps result against a region.");
|
|
|
|
static DEFINE_string2(json, J, "", "write json version of tests.");
|
2019-03-25 15:54:59 +00:00
|
|
|
static DEFINE_bool2(verbose, v, false, "enable verbose output from the test driver.");
|
2019-03-22 18:15:11 +00:00
|
|
|
static DEFINE_bool2(veryVerbose, V, false, "tell individual tests to be verbose.");
|
2020-07-27 18:52:19 +00:00
|
|
|
static DEFINE_bool(cpu, true, "Run CPU-bound work?");
|
|
|
|
static DEFINE_bool(gpu, true, "Run GPU-bound work?");
|
2016-10-24 12:10:14 +00:00
|
|
|
|
2019-03-25 15:54:59 +00:00
|
|
|
static DEFINE_string2(match, m, nullptr,
|
|
|
|
"[~][^]substring[$] [...] of name to run.\n"
|
|
|
|
"Multiple matches may be separated by spaces.\n"
|
|
|
|
"~ causes a matching name to always be skipped\n"
|
|
|
|
"^ requires the start of the name to match\n"
|
|
|
|
"$ requires the end of the name to match\n"
|
|
|
|
"^ and $ requires an exact match\n"
|
|
|
|
"If a name does not match any list entry,\n"
|
|
|
|
"it is skipped unless some list entry starts with ~");
|
|
|
|
|
|
|
|
static DEFINE_int_2(threads, j, -1,
|
|
|
|
"Run threadsafe tests on a threadpool with this many extra threads, "
|
|
|
|
"defaulting to one extra thread per core.");
|
|
|
|
|
2016-10-04 14:01:04 +00:00
|
|
|
#if DEBUG_COIN
|
2019-03-21 16:31:36 +00:00
|
|
|
static DEFINE_bool2(coinTest, c, false, "detect unused coincidence algorithms.");
|
2016-10-04 14:01:04 +00:00
|
|
|
#endif
|
2014-01-02 16:19:53 +00:00
|
|
|
|
2009-04-01 18:31:44 +00:00
|
|
|
// need to explicitly declare this, or we get some weird infinite loop llist
|
|
|
|
template TestRegistry* TestRegistry::gHead;
|
2015-10-16 16:03:38 +00:00
|
|
|
void (*gVerboseFinalize)() = nullptr;
|
2009-04-01 18:31:44 +00:00
|
|
|
|
2015-01-20 17:30:20 +00:00
|
|
|
// The threads report back to this object when they are done.
|
|
|
|
class Status {
|
2009-02-27 16:24:51 +00:00
|
|
|
public:
|
2015-01-20 17:30:20 +00:00
|
|
|
explicit Status(int total)
|
|
|
|
: fDone(0), fTestCount(0), fFailCount(0), fTotal(total) {}
|
|
|
|
// Threadsafe.
|
|
|
|
void endTest(const char* testName,
|
|
|
|
bool success,
|
|
|
|
SkMSec elapsed,
|
|
|
|
int testCount) {
|
2018-12-04 16:52:51 +00:00
|
|
|
const int done = ++fDone;
|
|
|
|
fTestCount += testCount;
|
2015-01-20 17:30:20 +00:00
|
|
|
if (!success) {
|
|
|
|
SkDebugf("\n---- %s FAILED", testName);
|
2013-04-22 16:43:07 +00:00
|
|
|
}
|
|
|
|
|
2014-01-02 16:19:53 +00:00
|
|
|
SkString prefix(kSkOverwriteLine);
|
|
|
|
SkString time;
|
|
|
|
if (FLAGS_verbose) {
|
|
|
|
prefix.printf("\n");
|
2015-01-20 17:30:20 +00:00
|
|
|
time.printf("%5dms ", elapsed);
|
2009-04-09 04:06:54 +00:00
|
|
|
}
|
2015-01-20 17:30:20 +00:00
|
|
|
SkDebugf("%s[%3d/%3d] %s%s", prefix.c_str(), done, fTotal, time.c_str(),
|
|
|
|
testName);
|
2009-04-09 04:06:54 +00:00
|
|
|
}
|
2013-04-19 13:24:28 +00:00
|
|
|
|
2018-12-04 16:52:51 +00:00
|
|
|
void reportFailure() { fFailCount++; }
|
2015-01-20 17:30:20 +00:00
|
|
|
|
|
|
|
int32_t testCount() { return fTestCount; }
|
|
|
|
int32_t failCount() { return fFailCount; }
|
|
|
|
|
2012-11-29 16:29:58 +00:00
|
|
|
private:
|
2018-12-04 16:52:51 +00:00
|
|
|
std::atomic<int32_t> fDone;
|
|
|
|
std::atomic<int32_t> fTestCount;
|
|
|
|
std::atomic<int32_t> fFailCount;
|
2014-01-02 16:19:53 +00:00
|
|
|
const int fTotal;
|
2009-02-27 16:24:51 +00:00
|
|
|
};
|
|
|
|
|
2016-02-17 03:06:15 +00:00
|
|
|
class SkTestRunnable {
|
2013-04-19 13:24:28 +00:00
|
|
|
public:
|
2017-11-15 20:48:03 +00:00
|
|
|
SkTestRunnable(const Test& test, Status* status) : fTest(test), fStatus(status) {}
|
|
|
|
|
|
|
|
void operator()() {
|
|
|
|
struct TestReporter : public skiatest::Reporter {
|
|
|
|
public:
|
|
|
|
TestReporter() : fStats(nullptr), fError(false), fTestCount(0) {}
|
|
|
|
void bumpTestCount() override { ++fTestCount; }
|
|
|
|
bool allowExtendedTest() const override { return FLAGS_extendedTest; }
|
|
|
|
bool verbose() const override { return FLAGS_veryVerbose; }
|
|
|
|
void reportFailed(const skiatest::Failure& failure) override {
|
|
|
|
SkDebugf("\nFAILED: %s", failure.toString().c_str());
|
|
|
|
fError = true;
|
|
|
|
}
|
|
|
|
void* stats() const override { return fStats; }
|
|
|
|
void* fStats;
|
|
|
|
bool fError;
|
|
|
|
int fTestCount;
|
|
|
|
} reporter;
|
|
|
|
|
|
|
|
const Timer timer;
|
|
|
|
fTest.proc(&reporter, GrContextOptions());
|
|
|
|
SkMSec elapsed = timer.elapsedMsInt();
|
|
|
|
if (reporter.fError) {
|
|
|
|
fStatus->reportFailure();
|
|
|
|
}
|
|
|
|
fStatus->endTest(fTest.name, !reporter.fError, elapsed, reporter.fTestCount);
|
2013-04-19 13:24:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
2015-01-20 17:30:20 +00:00
|
|
|
Test fTest;
|
|
|
|
Status* fStatus;
|
2013-04-19 13:24:28 +00:00
|
|
|
};
|
2013-04-11 16:54:09 +00:00
|
|
|
|
2014-01-30 15:30:50 +00:00
|
|
|
static bool should_run(const char* testName, bool isGPUTest) {
|
2019-03-20 15:50:33 +00:00
|
|
|
if (CommandLineFlags::ShouldSkip(FLAGS_match, testName)) {
|
2014-01-30 15:30:50 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (!FLAGS_cpu && !isGPUTest) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (!FLAGS_gpu && isGPUTest) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2017-02-06 17:46:20 +00:00
|
|
|
int main(int argc, char** argv) {
|
2019-03-20 15:50:33 +00:00
|
|
|
CommandLineFlags::Parse(argc, argv);
|
2016-11-15 18:22:25 +00:00
|
|
|
#if DEBUG_DUMP_VERIFY
|
2016-10-24 12:10:14 +00:00
|
|
|
SkPathOpsDebug::gDumpOp = FLAGS_dumpOp;
|
|
|
|
SkPathOpsDebug::gVerifyOp = FLAGS_verifyOp;
|
|
|
|
#endif
|
2017-01-23 14:38:52 +00:00
|
|
|
SkPathOpsDebug::gRunFail = FLAGS_runFail;
|
2016-10-24 12:10:14 +00:00
|
|
|
SkPathOpsDebug::gVeryVerbose = FLAGS_veryVerbose;
|
2018-08-07 20:38:21 +00:00
|
|
|
PathOpsDebug::gOutFirst = true;
|
2018-08-08 13:48:09 +00:00
|
|
|
PathOpsDebug::gCheckForDuplicateNames = false;
|
2018-08-08 17:17:25 +00:00
|
|
|
PathOpsDebug::gOutputSVG = false;
|
2018-08-07 20:38:21 +00:00
|
|
|
if ((PathOpsDebug::gJson = !FLAGS_json.isEmpty())) {
|
|
|
|
PathOpsDebug::gOut = fopen(FLAGS_json[0], "wb");
|
|
|
|
fprintf(PathOpsDebug::gOut, "{\n");
|
|
|
|
FLAGS_threads = 0;
|
2018-08-27 20:10:28 +00:00
|
|
|
PathOpsDebug::gMarkJsonFlaky = false;
|
2018-08-07 20:38:21 +00:00
|
|
|
}
|
2014-06-18 18:44:15 +00:00
|
|
|
SetupCrashHandler();
|
2013-04-11 16:54:09 +00:00
|
|
|
|
2014-11-07 14:12:30 +00:00
|
|
|
SkAutoGraphics ag;
|
2011-09-02 15:06:44 +00:00
|
|
|
|
2011-10-26 15:25:18 +00:00
|
|
|
{
|
|
|
|
SkString header("Skia UnitTests:");
|
2013-04-11 18:27:52 +00:00
|
|
|
if (!FLAGS_match.isEmpty()) {
|
2013-05-02 13:14:40 +00:00
|
|
|
header.appendf(" --match");
|
|
|
|
for (int index = 0; index < FLAGS_match.count(); ++index) {
|
|
|
|
header.appendf(" %s", FLAGS_match[index]);
|
|
|
|
}
|
2011-10-26 15:25:18 +00:00
|
|
|
}
|
2015-01-20 17:30:20 +00:00
|
|
|
SkString tmpDir = skiatest::GetTmpDir();
|
2013-06-06 14:59:56 +00:00
|
|
|
if (!tmpDir.isEmpty()) {
|
|
|
|
header.appendf(" --tmpDir %s", tmpDir.c_str());
|
2013-03-20 13:48:20 +00:00
|
|
|
}
|
2014-06-18 21:32:48 +00:00
|
|
|
SkString resourcePath = GetResourcePath();
|
2013-06-06 14:59:56 +00:00
|
|
|
if (!resourcePath.isEmpty()) {
|
|
|
|
header.appendf(" --resourcePath %s", resourcePath.c_str());
|
2013-02-25 20:24:24 +00:00
|
|
|
}
|
2016-10-24 12:10:14 +00:00
|
|
|
#if DEBUG_COIN
|
|
|
|
if (FLAGS_coinTest) {
|
|
|
|
header.appendf(" -c");
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
if (FLAGS_dumpOp) {
|
|
|
|
header.appendf(" -d");
|
|
|
|
}
|
2017-01-23 14:38:52 +00:00
|
|
|
#ifdef SK_DEBUG
|
|
|
|
if (FLAGS_runFail) {
|
|
|
|
header.appendf(" -f");
|
|
|
|
}
|
|
|
|
#endif
|
2016-10-24 12:10:14 +00:00
|
|
|
if (FLAGS_verbose) {
|
|
|
|
header.appendf(" -v");
|
|
|
|
}
|
|
|
|
if (FLAGS_veryVerbose) {
|
|
|
|
header.appendf(" -V");
|
|
|
|
}
|
|
|
|
if (FLAGS_extendedTest) {
|
|
|
|
header.appendf(" -x");
|
|
|
|
}
|
|
|
|
if (FLAGS_verifyOp) {
|
|
|
|
header.appendf(" -y");
|
|
|
|
}
|
2011-10-26 15:25:18 +00:00
|
|
|
#ifdef SK_DEBUG
|
|
|
|
header.append(" SK_DEBUG");
|
|
|
|
#else
|
|
|
|
header.append(" SK_RELEASE");
|
|
|
|
#endif
|
2014-04-14 17:08:59 +00:00
|
|
|
if (FLAGS_veryVerbose) {
|
|
|
|
header.appendf("\n");
|
|
|
|
}
|
2015-02-19 14:32:12 +00:00
|
|
|
SkDebugf("%s", header.c_str());
|
2011-10-26 15:25:18 +00:00
|
|
|
}
|
|
|
|
|
2009-04-02 16:59:40 +00:00
|
|
|
|
2013-04-22 16:43:07 +00:00
|
|
|
// Count tests first.
|
|
|
|
int total = 0;
|
|
|
|
int toRun = 0;
|
2013-07-24 17:24:23 +00:00
|
|
|
|
2018-07-30 21:07:07 +00:00
|
|
|
for (const Test& test : TestRegistry::Range()) {
|
2015-01-20 17:30:20 +00:00
|
|
|
if (should_run(test.name, test.needsGpu)) {
|
2013-04-22 16:43:07 +00:00
|
|
|
toRun++;
|
|
|
|
}
|
|
|
|
total++;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Now run them.
|
2011-09-02 15:06:44 +00:00
|
|
|
int skipCount = 0;
|
2013-04-19 13:24:28 +00:00
|
|
|
|
SkThreadPool ~~> SkTaskGroup
SkTaskGroup is like SkThreadPool except the threads stay in
one global pool. Each SkTaskGroup itself is tiny (4 bytes)
and its wait() method applies only to tasks add()ed to that
instance, not the whole thread pool.
This means we don't need to bring up new thread pools when
tests themselves want to use multithreading (e.g. pathops,
quilt). We just create a new SkTaskGroup and wait for that
to complete. This should be more efficient, and allow us
to expand where we use threads to really latency sensitive
places. E.g. we can probably now use these in nanobench
for CPU .skp rendering.
Now that all threads are sharing the same pool, I think we
can remove most of the custom mechanism pathops tests use
to control threading. They'll just ride on the global pool
with all other tests now.
This (temporarily?) removes the GPU multithreading feature
from DM, which we don't use.
On my desktop, DM runs a little faster (57s -> 55s) in
Debug, and a lot faster in Release (36s -> 24s). The bots
show speedups of similar proportions, cutting more than a
minute off the N4/Release and Win7/Debug runtimes.
BUG=skia:
Committed: https://skia.googlesource.com/skia/+/9c7207b5dc71dc5a96a2eb107d401133333d5b6f
R=caryclark@google.com, bsalomon@google.com, bungeman@google.com, mtklein@google.com, reed@google.com
Author: mtklein@chromium.org
Review URL: https://codereview.chromium.org/531653002
2014-09-03 22:34:37 +00:00
|
|
|
SkTaskGroup::Enabler enabled(FLAGS_threads);
|
|
|
|
SkTaskGroup cpuTests;
|
2015-01-20 17:30:20 +00:00
|
|
|
SkTArray<const Test*> gpuTests;
|
2014-01-02 16:19:53 +00:00
|
|
|
|
2015-01-20 17:30:20 +00:00
|
|
|
Status status(toRun);
|
2018-07-30 21:07:07 +00:00
|
|
|
|
|
|
|
for (const Test& test : TestRegistry::Range()) {
|
2015-01-20 17:30:20 +00:00
|
|
|
if (!should_run(test.name, test.needsGpu)) {
|
2011-09-02 15:06:44 +00:00
|
|
|
++skipCount;
|
2015-01-20 17:30:20 +00:00
|
|
|
} else if (test.needsGpu) {
|
|
|
|
gpuTests.push_back(&test);
|
2011-09-02 15:06:44 +00:00
|
|
|
} else {
|
2016-02-17 03:06:15 +00:00
|
|
|
cpuTests.add(SkTestRunnable(test, &status));
|
2011-09-02 15:06:44 +00:00
|
|
|
}
|
2009-02-27 16:24:51 +00:00
|
|
|
}
|
2009-04-09 04:06:54 +00:00
|
|
|
|
2014-02-26 16:31:22 +00:00
|
|
|
// Run GPU tests on this thread.
|
|
|
|
for (int i = 0; i < gpuTests.count(); i++) {
|
2017-11-15 20:48:03 +00:00
|
|
|
SkTestRunnable(*gpuTests[i], &status)();
|
2013-04-19 13:24:28 +00:00
|
|
|
}
|
|
|
|
|
2013-10-10 18:49:04 +00:00
|
|
|
// Block until threaded tests finish.
|
SkThreadPool ~~> SkTaskGroup
SkTaskGroup is like SkThreadPool except the threads stay in
one global pool. Each SkTaskGroup itself is tiny (4 bytes)
and its wait() method applies only to tasks add()ed to that
instance, not the whole thread pool.
This means we don't need to bring up new thread pools when
tests themselves want to use multithreading (e.g. pathops,
quilt). We just create a new SkTaskGroup and wait for that
to complete. This should be more efficient, and allow us
to expand where we use threads to really latency sensitive
places. E.g. we can probably now use these in nanobench
for CPU .skp rendering.
Now that all threads are sharing the same pool, I think we
can remove most of the custom mechanism pathops tests use
to control threading. They'll just ride on the global pool
with all other tests now.
This (temporarily?) removes the GPU multithreading feature
from DM, which we don't use.
On my desktop, DM runs a little faster (57s -> 55s) in
Debug, and a lot faster in Release (36s -> 24s). The bots
show speedups of similar proportions, cutting more than a
minute off the N4/Release and Win7/Debug runtimes.
BUG=skia:
Committed: https://skia.googlesource.com/skia/+/9c7207b5dc71dc5a96a2eb107d401133333d5b6f
R=caryclark@google.com, bsalomon@google.com, bungeman@google.com, mtklein@google.com, reed@google.com
Author: mtklein@chromium.org
Review URL: https://codereview.chromium.org/531653002
2014-09-03 22:34:37 +00:00
|
|
|
cpuTests.wait();
|
2013-04-19 13:24:28 +00:00
|
|
|
|
2014-01-02 16:19:53 +00:00
|
|
|
if (FLAGS_verbose) {
|
2015-01-20 17:30:20 +00:00
|
|
|
SkDebugf(
|
|
|
|
"\nFinished %d tests, %d failures, %d skipped. "
|
|
|
|
"(%d internal tests)",
|
|
|
|
toRun, status.failCount(), skipCount, status.testCount());
|
2015-10-16 16:03:38 +00:00
|
|
|
if (gVerboseFinalize) {
|
|
|
|
(*gVerboseFinalize)();
|
|
|
|
}
|
2013-04-10 15:57:31 +00:00
|
|
|
}
|
2012-07-22 22:33:05 +00:00
|
|
|
|
2014-01-02 16:19:53 +00:00
|
|
|
SkDebugf("\n");
|
2016-10-04 14:01:04 +00:00
|
|
|
#if DEBUG_COIN
|
|
|
|
if (FLAGS_coinTest) {
|
|
|
|
SkPathOpsDebug::DumpCoinDict();
|
|
|
|
}
|
|
|
|
#endif
|
2018-08-07 20:38:21 +00:00
|
|
|
if (PathOpsDebug::gJson) {
|
|
|
|
fprintf(PathOpsDebug::gOut, "\n}\n");
|
|
|
|
fclose(PathOpsDebug::gOut);
|
|
|
|
}
|
2015-01-20 17:30:20 +00:00
|
|
|
return (status.failCount() == 0) ? 0 : 1;
|
2009-02-27 16:24:51 +00:00
|
|
|
}
|