re-land r8514 without breaking 32-bit Mac compilers
Review URL: https://codereview.chromium.org/13427004

git-svn-id: http://skia.googlecode.com/svn/trunk@8518 2bbb7eff-a529-9590-31e7-b0007b416f81
parent 97f57a6c4f
commit 310478e72c
gm/gm_error.h
@@ -9,21 +9,48 @@
  * Error codes used by gmmain.cpp.
  */
 
+#ifndef gm_error_DEFINED
+#define gm_error_DEFINED
+
+#include "gm.h"
+
 namespace skiagm {
 
     /**
      * The complete list of error types we might encounter in GM.
      */
     enum ErrorType {
-#if SK_SUPPORT_GPU
+        // Even though kNoGpuContext_ErrorType only occurs when SK_SUPPORT_GPU
+        // is turned on, we always include this type in our enum so that
+        // reports will be consistent whether SK_SUPPORT_GPU is turned on
+        // or off (as long as the number of these errors is 0).
         kNoGpuContext_ErrorType,
-#endif
         kImageMismatch_ErrorType,
         kMissingExpectations_ErrorType,
         kWritingReferenceImage_ErrorType,
         kLast_ErrorType = kWritingReferenceImage_ErrorType
     };
 
+    /**
+     * Returns the name of the given ErrorType.
+     */
+    static const char *getErrorTypeName(ErrorType type) {
+        switch(type) {
+        case kNoGpuContext_ErrorType:
+            return "NoGpuContext";
+        case kImageMismatch_ErrorType:
+            return "ImageMismatch";
+        case kMissingExpectations_ErrorType:
+            return "MissingExpectations";
+        case kWritingReferenceImage_ErrorType:
+            return "WritingReferenceImage";
+        }
+        // control should never reach here
+        SkDEBUGFAIL("getErrorTypeName() called with unknown type");
+        return "Unknown";
+    }
+
     /**
      * A combination of 0 or more ErrorTypes.
      */
@@ -87,3 +114,5 @@ namespace skiagm {
     // No errors at all.
     const static ErrorCombination kEmpty_ErrorCombination;
 }
+
+#endif // ifndef gm_error_DEFINED
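
The new header above fixes the list of error types and gives each one a printable name; the gmmain.cpp changes below then keep one list of failed tests per ErrorType and report a per-type count. A standalone sketch of that bookkeeping pattern follows; std::vector and std::string stand in for SkTArray and SkString, and the FailureTracker/recordFailure/printSummary names are hypothetical, not anything in this commit.

    // Standalone illustration (not part of this commit) of the per-ErrorType
    // bookkeeping above: one list of failed tests per error type, plus the
    // single summary line that the new ListErrors() prints.
    #include <cstdio>
    #include <string>
    #include <vector>

    enum ErrorType {
        kNoGpuContext_ErrorType,
        kImageMismatch_ErrorType,
        kMissingExpectations_ErrorType,
        kWritingReferenceImage_ErrorType,
        kLast_ErrorType = kWritingReferenceImage_ErrorType
    };

    static const char *getErrorTypeName(ErrorType type) {
        switch (type) {
        case kNoGpuContext_ErrorType:          return "NoGpuContext";
        case kImageMismatch_ErrorType:         return "ImageMismatch";
        case kMissingExpectations_ErrorType:   return "MissingExpectations";
        case kWritingReferenceImage_ErrorType: return "WritingReferenceImage";
        }
        return "Unknown";
    }

    class FailureTracker {
    public:
        // Mirrors GMMain::fFailedTests: the ErrorType value indexes the array.
        void recordFailure(ErrorType type, const std::string &testName) {
            fFailedTests[type].push_back(testName);
        }

        // Mirrors the summary line of the new ListErrors(): one count per type.
        void printSummary(int testsRun) const {
            std::printf("Ran %d tests:", testsRun);
            for (int t = 0; t <= kLast_ErrorType; t++) {
                ErrorType type = static_cast<ErrorType>(t);
                std::printf(" %s=%d", getErrorTypeName(type), (int)fFailedTests[t].size());
            }
            std::printf("\n");
        }

    private:
        std::vector<std::string> fFailedTests[kLast_ErrorType + 1];
    };

    int main() {
        FailureTracker tracker;
        tracker.recordFailure(kImageMismatch_ErrorType, "8888/selftest1");
        tracker.recordFailure(kImageMismatch_ErrorType, "565/selftest1");
        // Prints: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
        tracker.printSummary(9);
        return 0;
    }
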
gm/gmmain.cpp (254 lines changed)
@@ -34,6 +34,7 @@
 #include "SkRefCnt.h"
 #include "SkStream.h"
 #include "SkTArray.h"
+#include "SkTDict.h"
 #include "SkTileGridPicture.h"
 #include "SamplePipeControllers.h"
 
@@ -85,14 +86,6 @@ extern bool gSkSuppressFontCachePurgeSpew;
 
 using namespace skiagm;
 
-struct FailRec {
-    SkString fName;
-    bool fIsPixelError;
-
-    FailRec() : fIsPixelError(false) {}
-    FailRec(const SkString& name) : fName(name), fIsPixelError(false) {}
-};
-
 class Iter {
 public:
     Iter() {
@@ -183,12 +176,9 @@ static PipeFlagComboData gPipeWritingFlagCombos[] = {
 
 class GMMain {
 public:
-    GMMain() {
-        // Set default values of member variables, which tool_main()
-        // may override.
-        fUseFileHierarchy = false;
+    GMMain() : fUseFileHierarchy(false), fMismatchPath(NULL), fTestsRun(0),
+               fRenderModesEncountered(1) {
         fIgnorableErrorCombination.add(kMissingExpectations_ErrorType);
-        fMismatchPath = NULL;
     }
 
     SkString make_name(const char shortName[], const char configName[]) {
@@ -242,36 +232,105 @@ public:
     }
 
     /**
-     * Records the errors encountered in fFailedTests, except for any error
-     * types we want to ignore.
+     * Add all render modes encountered thus far to the "modes" array.
      */
-    void RecordError(const ErrorCombination& errorCombination, const SkString& name,
-                     const char renderModeDescriptor []) {
-        // The common case: no error means nothing to record.
+    void GetRenderModesEncountered(SkTArray<SkString> &modes) {
+        SkTDict<int>::Iter iter(this->fRenderModesEncountered);
+        const char* mode;
+        while ((mode = iter.next(NULL)) != NULL) {
+            SkString modeAsString = SkString(mode);
+            // TODO(epoger): It seems a bit silly that all of these modes were
+            // recorded with a leading "-" which we have to remove here
+            // (except for mode "", which means plain old original mode).
+            // But that's how renderModeDescriptor has been passed into
+            // compare_test_results_to_reference_bitmap() historically,
+            // and changing that now may affect other parts of our code.
+            if (modeAsString.startsWith("-")) {
+                modeAsString.remove(0, 1);
+                modes.push_back(modeAsString);
+            }
+        }
+    }
+
+    /**
+     * Records the results of this test in fTestsRun and fFailedTests.
+     *
+     * We even record successes, and errors that we regard as
+     * "ignorable"; we can filter them out later.
+     */
+    void RecordTestResults(const ErrorCombination& errorCombination, const SkString& name,
+                           const char renderModeDescriptor []) {
+        // Things to do regardless of errorCombination.
+        fTestsRun++;
+        int renderModeCount = 0;
+        this->fRenderModesEncountered.find(renderModeDescriptor, &renderModeCount);
+        renderModeCount++;
+        this->fRenderModesEncountered.set(renderModeDescriptor, renderModeCount);
+
         if (errorCombination.isEmpty()) {
             return;
         }
 
-        // If only certain error type(s) were reported, we know we can ignore them.
-        if (errorCombination.minus(fIgnorableErrorCombination).isEmpty()) {
-            return;
-        }
-
-        FailRec& rec = fFailedTests.push_back(make_name(name.c_str(), renderModeDescriptor));
-        rec.fIsPixelError = errorCombination.includes(kImageMismatch_ErrorType);
-    }
-
-    // List contents of fFailedTests via SkDebug.
-    void ListErrors() {
-        for (int i = 0; i < fFailedTests.count(); ++i) {
-            if (fFailedTests[i].fIsPixelError) {
-                gm_fprintf(stderr, "\t\t%s pixel_error\n", fFailedTests[i].fName.c_str());
-            } else {
-                gm_fprintf(stderr, "\t\t%s\n", fFailedTests[i].fName.c_str());
+        // Things to do only if there is some error condition.
+        SkString fullName = make_name(name.c_str(), renderModeDescriptor);
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            if (errorCombination.includes(type)) {
+                fFailedTests[type].push_back(fullName);
             }
         }
     }
 
+    /**
+     * Return the number of significant (non-ignorable) errors we have
+     * encountered so far.
+     */
+    int NumSignificantErrors() {
+        int significantErrors = 0;
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            if (!fIgnorableErrorCombination.includes(type)) {
+                significantErrors += fFailedTests[type].count();
+            }
+        }
+        return significantErrors;
+    }
+
+    /**
+     * List contents of fFailedTests to stdout.
+     */
+    void ListErrors() {
+        // First, print a single summary line.
+        SkString summary;
+        summary.appendf("Ran %d tests:", fTestsRun);
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            summary.appendf(" %s=%d", getErrorTypeName(type), fFailedTests[type].count());
+        }
+        gm_fprintf(stdout, "%s\n", summary.c_str());
+
+        // Now, for each failure type, list the tests that failed that way.
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            SkString line;
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            if (fIgnorableErrorCombination.includes(type)) {
+                line.append("[ ] ");
+            } else {
+                line.append("[*] ");
+            }
+
+            SkTArray<SkString> *failedTestsOfThisType = &fFailedTests[type];
+            int count = failedTestsOfThisType->count();
+            line.appendf("%d %s:", count, getErrorTypeName(type));
+            for (int i = 0; i < count; ++i) {
+                line.append(" ");
+                line.append((*failedTestsOfThisType)[i]);
+            }
+            gm_fprintf(stdout, "%s\n", line.c_str());
+        }
+        gm_fprintf(stdout, "(results marked with [*] will cause nonzero return value)\n");
+    }
+
     static bool write_document(const SkString& path,
                                const SkDynamicMemoryWStream& document) {
         SkFILEWStream stream(path.c_str());
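
GMMain now keeps an SkTDict<int> keyed by renderModeDescriptor: RecordTestResults() bumps a per-descriptor count, and GetRenderModesEncountered() later walks that dictionary to list the modes that were actually exercised. A minimal sketch of the same counting idiom follows, under the assumption that a plain string-to-int map captures the relevant SkTDict behavior; the function names below are made up for illustration.

    // Sketch (not Skia code) of the render-mode counting idiom used by
    // RecordTestResults() and GetRenderModesEncountered() above; std::map
    // stands in for SkTDict<int>.
    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    static std::map<std::string, int> gRenderModesEncountered;

    // Called once per test: look up the descriptor's current count and bump it,
    // the same find/increment/set sequence as in RecordTestResults().
    static void countRenderMode(const std::string &renderModeDescriptor) {
        gRenderModesEncountered[renderModeDescriptor]++;   // missing keys start at 0
    }

    // Like GetRenderModesEncountered(): walk the dictionary and strip the
    // historical leading "-" from every mode except "" (plain original mode).
    static std::vector<std::string> renderModesEncountered() {
        std::vector<std::string> modes;
        for (std::map<std::string, int>::const_iterator it = gRenderModesEncountered.begin();
             it != gRenderModesEncountered.end(); ++it) {
            const std::string &mode = it->first;
            if (!mode.empty() && mode[0] == '-') {
                modes.push_back(mode.substr(1));
            }
        }
        return modes;
    }

    int main() {
        countRenderMode("");             // original rendering
        countRenderMode("-replay");
        countRenderMode("-serialize");
        std::vector<std::string> modes = renderModesEncountered();
        for (size_t i = 0; i < modes.size(); ++i) {
            std::printf("mode: %s\n", modes[i].c_str());   // replay, serialize
        }
        return 0;
    }
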
@@ -542,7 +601,16 @@ public:
         } else {
             gm_fprintf(stderr, "FAILED to write %s\n", path.c_str());
             ErrorCombination errors(kWritingReferenceImage_ErrorType);
-            RecordError(errors, name, renderModeDescriptor);
+            // TODO(epoger): Don't call RecordTestResults() here...
+            // Instead, we should make sure to call RecordTestResults
+            // exactly ONCE per test. (Otherwise, gmmain.fTestsRun
+            // will be incremented twice for this test: once in
+            // compare_test_results_to_stored_expectations() before
+            // that method calls this one, and again here.)
+            //
+            // When we make that change, we should probably add a
+            // WritingReferenceImage test to the gm self-tests.)
+            RecordTestResults(errors, name, renderModeDescriptor);
             return errors;
         }
     }
@@ -654,7 +722,7 @@ public:
                 report_bitmap_diffs(*expectedBitmapPtr, actualBitmap, completeName);
             }
         }
-        RecordError(errors, baseNameString, renderModeDescriptor);
+        RecordTestResults(errors, baseNameString, renderModeDescriptor);
 
         if (addToJsonSummary) {
             add_actual_results_to_json_summary(completeName, actualChecksum, errors,
@@ -770,6 +838,7 @@ public:
             add_actual_results_to_json_summary(name.c_str(), actualChecksum,
                                                ErrorCombination(kMissingExpectations_ErrorType),
                                                false);
+            RecordTestResults(ErrorCombination(kMissingExpectations_ErrorType), name, "");
         }
 
         // TODO: Consider moving this into compare_to_expectations(),
@@ -797,6 +866,13 @@ public:
             GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
             SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
 
+        // TODO(epoger): This method is run to compare results across
+        // different rendering modes (as opposed to
+        // compare_test_results_to_stored_expectations(), which
+        // compares results against expectations stored on disk). If
+        // we would like the GenerateGMs step to distinguish between
+        // those two types of mismatches, we should report image
+        // mismatches in here with a different ErrorType.
         SkASSERT(referenceBitmap);
         SkString name = make_name(gm->shortName(), gRec.fName);
         Expectations expectations(*referenceBitmap);
@@ -910,6 +986,8 @@ public:
             // ('image-surface gm test is failing in "deferred" mode,
             // and gm is not reporting the failure')
             if (errors.isEmpty()) {
+                // TODO(epoger): Report this as a new ErrorType,
+                // something like kImageGeneration_ErrorType?
                 return kEmpty_ErrorCombination;
             }
             return compare_test_results_to_reference_bitmap(
@@ -983,8 +1061,10 @@ public:
 
     const char* fMismatchPath;
 
-    // information about all failed tests we have encountered so far
-    SkTArray<FailRec> fFailedTests;
+    // collection of tests that have failed with each ErrorType
+    SkTArray<SkString> fFailedTests[kLast_ErrorType+1];
+    int fTestsRun;
+    SkTDict<int> fRenderModesEncountered;
 
     // Where to read expectations (expected image checksums, etc.) from.
     // If unset, we don't do comparisons.
@@ -1292,7 +1372,9 @@ ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co
     ErrorCombination errorsForAllModes;
     uint32_t gmFlags = gm->getFlags();
 
-    // run the picture centric GM steps
+    // TODO(epoger): We should start recording any per-GM skipped
+    // modes (i.e. those we skipped due to gmFlags) with a new
+    // ErrorType, perhaps named kIntentionallySkipped_ErrorType.
    if (!(gmFlags & GM::kSkipPicture_Flag)) {
 
         ErrorCombination pictErrors;
|
|||||||
return errorsForAllModes;
|
return errorsForAllModes;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a list of all entries in an array of strings as a single string
|
||||||
|
* of this form:
|
||||||
|
* "item1", "item2", "item3"
|
||||||
|
*/
|
||||||
|
SkString list_all(const SkTArray<SkString> &stringArray);
|
||||||
|
SkString list_all(const SkTArray<SkString> &stringArray) {
|
||||||
|
SkString total;
|
||||||
|
for (int i = 0; i < stringArray.count(); i++) {
|
||||||
|
if (i > 0) {
|
||||||
|
total.append(", ");
|
||||||
|
}
|
||||||
|
total.append("\"");
|
||||||
|
total.append(stringArray[i]);
|
||||||
|
total.append("\"");
|
||||||
|
}
|
||||||
|
return total;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a list of configuration names, as a single string of this form:
|
||||||
|
* "item1", "item2", "item3"
|
||||||
|
*
|
||||||
|
* @param configs configurations, as a list of indices into gRec
|
||||||
|
*/
|
||||||
|
SkString list_all_config_names(const SkTDArray<size_t> &configs);
|
||||||
|
SkString list_all_config_names(const SkTDArray<size_t> &configs) {
|
||||||
|
SkString total;
|
||||||
|
for (int i = 0; i < configs.count(); i++) {
|
||||||
|
if (i > 0) {
|
||||||
|
total.append(", ");
|
||||||
|
}
|
||||||
|
total.append("\"");
|
||||||
|
total.append(gRec[configs[i]].fName);
|
||||||
|
total.append("\"");
|
||||||
|
}
|
||||||
|
return total;
|
||||||
|
}
|
||||||
|
|
||||||
int tool_main(int argc, char** argv);
|
int tool_main(int argc, char** argv);
|
||||||
int tool_main(int argc, char** argv) {
|
int tool_main(int argc, char** argv) {
|
||||||
|
|
||||||
@@ -1571,12 +1692,7 @@ int tool_main(int argc, char** argv) {
         moduloRemainder = -1;
     }
 
-    // Accumulate success of all tests.
-    int testsRun = 0;
-    int testsPassed = 0;
-    int testsFailed = 0;
-    int testsMissingReferenceImages = 0;
+    int gmsRun = 0;
 
     int gmIndex = -1;
     SkString moduloStr;
 
@@ -1616,42 +1732,44 @@ int tool_main(int argc, char** argv) {
             continue;
         }
 
+        gmsRun++;
         SkISize size = gm->getISize();
         gm_fprintf(stdout, "%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName,
                    size.width(), size.height());
 
-        ErrorCombination testErrors;
-        testErrors.add(run_multiple_configs(gmmain, gm, configs, grFactory));
+        run_multiple_configs(gmmain, gm, configs, grFactory);
 
         SkBitmap comparisonBitmap;
         const ConfigData compareConfig =
             { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextType, 0, kRW_ConfigFlag, "comparison", false };
-        testErrors.add(gmmain.generate_image(gm, compareConfig, NULL, &comparisonBitmap, false));
+        gmmain.generate_image(gm, compareConfig, NULL, &comparisonBitmap, false);
 
         // TODO(epoger): only run this if gmmain.generate_image() succeeded?
         // Otherwise, what are we comparing against?
-        testErrors.add(run_multiple_modes(gmmain, gm, compareConfig, comparisonBitmap,
-                                          tileGridReplayScales));
-
-        // Update overall results.
-        // We only tabulate the particular error types that we currently
-        // care about (e.g., missing reference images). Later on, if we
-        // want to also tabulate other error types, we can do so.
-        testsRun++;
-        if (!gmmain.fExpectationsSource.get() ||
-            (testErrors.includes(kMissingExpectations_ErrorType))) {
-            testsMissingReferenceImages++;
-        }
-        if (testErrors.minus(gmmain.fIgnorableErrorCombination).isEmpty()) {
-            testsPassed++;
-        } else {
-            testsFailed++;
-        }
+        run_multiple_modes(gmmain, gm, compareConfig, comparisonBitmap, tileGridReplayScales);
 
         SkDELETE(gm);
     }
-    gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d missing reference images\n",
-               testsRun, testsPassed, testsFailed, testsMissingReferenceImages);
+    SkTArray<SkString> modes;
+    gmmain.GetRenderModesEncountered(modes);
+
+    // Output summary to stdout.
+    gm_fprintf(stdout, "Ran %d GMs\n", gmsRun);
+    gm_fprintf(stdout, "... over %2d configs [%s]\n", configs.count(),
+               list_all_config_names(configs).c_str());
+    gm_fprintf(stdout, "... and %2d modes [%s]\n", modes.count(), list_all(modes).c_str());
+    gm_fprintf(stdout, "... so there should be a total of %d tests.\n",
+               gmsRun * (configs.count() + modes.count()));
+
+    // TODO(epoger): Ultimately, we should signal an error if the
+    // expected total number of tests (displayed above) does not match
+    // gmmain.fTestsRun. But for now, there are cases where those
+    // numbers won't match: specifically, if some configs/modes are
+    // skipped on a per-GM basis (due to gm->getFlags() for a specific
+    // GM). Later on, we should record tests like that using some new
+    // ErrorType, like kIntentionallySkipped_ErrorType. Then we could
+    // signal an error if the totals didn't match up.
    gmmain.ListErrors();
 
     if (FLAGS_writeJsonSummaryPath.count() == 1) {
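
For a concrete reading of the expected-total line that tool_main() now prints, take the figures from the self-test expectations further down (1 GM over 2 configs and 7 modes; these numbers come from those expectation files, not from a new measurement):

    expected tests = gmsRun * (configs.count() + modes.count())
                   = 1 * (2 + 7)
                   = 9        -> "GM: ... so there should be a total of 9 tests."

This matches the new "Ran 9 tests" summary in the expected outputs, while gmmain.fTestsRun may differ whenever per-GM flags skip some configs or modes, as the TODO above notes.
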
@@ -1691,7 +1809,7 @@ int tool_main(int argc, char** argv) {
 #endif
     SkGraphics::Term();
 
-    return (0 == testsFailed) ? 0 : -1;
+    return (0 == gmmain.NumSignificantErrors()) ? 0 : -1;
 }
 
 void GMMain::installFilter(SkCanvas* canvas) {
@@ -1,4 +1,2 @@
 GM: ---- 8888/selftest1: 60000 (of 60000) differing pixels, max per-channel mismatch R=0 G=111 B=103 A=0
 GM: ---- 565/selftest1: not computing max per-channel pixel mismatch because non-8888
-GM: 8888/selftest1 pixel_error
-GM: 565/selftest1 pixel_error
@@ -1,3 +1,12 @@
 GM: reading from gm/tests/inputs/images/different-pixels
 GM: drawing... selftest1 [300 200]
-GM: Ran 1 tests: 0 passed, 1 failed, 0 missing reference images
+GM: Ran 1 GMs
+GM: ... over 2 configs ["8888", "565"]
+GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
+GM: ... so there should be a total of 9 tests.
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: [*] 0 NoGpuContext:
+GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
+GM: [ ] 0 MissingExpectations:
+GM: [*] 0 WritingReferenceImage:
+GM: (results marked with [*] will cause nonzero return value)
@@ -1,2 +0,0 @@
-GM: 8888/selftest1 pixel_error
-GM: 565/selftest1 pixel_error
@@ -1,3 +1,12 @@
 GM: reading expectations from JSON summary file gm/tests/inputs/json/different-pixels.json
 GM: drawing... selftest1 [300 200]
-GM: Ran 1 tests: 0 passed, 1 failed, 0 missing reference images
+GM: Ran 1 GMs
+GM: ... over 2 configs ["8888", "565"]
+GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
+GM: ... so there should be a total of 9 tests.
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: [*] 0 NoGpuContext:
+GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
+GM: [ ] 0 MissingExpectations:
+GM: [*] 0 WritingReferenceImage:
+GM: (results marked with [*] will cause nonzero return value)
|
|||||||
GM: reading from gm/tests/inputs/images/empty-dir
|
GM: reading from gm/tests/inputs/images/empty-dir
|
||||||
GM: drawing... selftest1 [300 200]
|
GM: drawing... selftest1 [300 200]
|
||||||
GM: Ran 1 tests: 1 passed, 0 failed, 1 missing reference images
|
GM: Ran 1 GMs
|
||||||
|
GM: ... over 2 configs ["8888", "565"]
|
||||||
|
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
|
||||||
|
GM: ... so there should be a total of 9 tests.
|
||||||
|
GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
|
||||||
|
GM: [*] 0 NoGpuContext:
|
||||||
|
GM: [*] 0 ImageMismatch:
|
||||||
|
GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
|
||||||
|
GM: [*] 0 WritingReferenceImage:
|
||||||
|
GM: (results marked with [*] will cause nonzero return value)
|
||||||
|
@ -1,3 +1,12 @@
|
|||||||
GM: reading from gm/tests/inputs/images/identical-bytes
|
GM: reading from gm/tests/inputs/images/identical-bytes
|
||||||
GM: drawing... selftest1 [300 200]
|
GM: drawing... selftest1 [300 200]
|
||||||
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
|
GM: Ran 1 GMs
|
||||||
|
GM: ... over 2 configs ["8888", "565"]
|
||||||
|
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
|
||||||
|
GM: ... so there should be a total of 9 tests.
|
||||||
|
GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
|
||||||
|
GM: [*] 0 NoGpuContext:
|
||||||
|
GM: [*] 0 ImageMismatch:
|
||||||
|
GM: [ ] 0 MissingExpectations:
|
||||||
|
GM: [*] 0 WritingReferenceImage:
|
||||||
|
GM: (results marked with [*] will cause nonzero return value)
|
||||||
|
@ -1,3 +1,12 @@
|
|||||||
GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-bytes.json
|
GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-bytes.json
|
||||||
GM: drawing... selftest1 [300 200]
|
GM: drawing... selftest1 [300 200]
|
||||||
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
|
GM: Ran 1 GMs
|
||||||
|
GM: ... over 2 configs ["8888", "565"]
|
||||||
|
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
|
||||||
|
GM: ... so there should be a total of 9 tests.
|
||||||
|
GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
|
||||||
|
GM: [*] 0 NoGpuContext:
|
||||||
|
GM: [*] 0 ImageMismatch:
|
||||||
|
GM: [ ] 0 MissingExpectations:
|
||||||
|
GM: [*] 0 WritingReferenceImage:
|
||||||
|
GM: (results marked with [*] will cause nonzero return value)
|
||||||
|
@ -1,3 +1,12 @@
|
|||||||
GM: reading from gm/tests/inputs/images/identical-pixels
|
GM: reading from gm/tests/inputs/images/identical-pixels
|
||||||
GM: drawing... selftest1 [300 200]
|
GM: drawing... selftest1 [300 200]
|
||||||
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
|
GM: Ran 1 GMs
|
||||||
|
GM: ... over 2 configs ["8888", "565"]
|
||||||
|
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
|
||||||
|
GM: ... so there should be a total of 9 tests.
|
||||||
|
GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
|
||||||
|
GM: [*] 0 NoGpuContext:
|
||||||
|
GM: [*] 0 ImageMismatch:
|
||||||
|
GM: [ ] 0 MissingExpectations:
|
||||||
|
GM: [*] 0 WritingReferenceImage:
|
||||||
|
GM: (results marked with [*] will cause nonzero return value)
|
||||||
|
@ -1,3 +1,12 @@
|
|||||||
GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-pixels.json
|
GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-pixels.json
|
||||||
GM: drawing... selftest1 [300 200]
|
GM: drawing... selftest1 [300 200]
|
||||||
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
|
GM: Ran 1 GMs
|
||||||
|
GM: ... over 2 configs ["8888", "565"]
|
||||||
|
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
|
||||||
|
GM: ... so there should be a total of 9 tests.
|
||||||
|
GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
|
||||||
|
GM: [*] 0 NoGpuContext:
|
||||||
|
GM: [*] 0 ImageMismatch:
|
||||||
|
GM: [ ] 0 MissingExpectations:
|
||||||
|
GM: [*] 0 WritingReferenceImage:
|
||||||
|
GM: (results marked with [*] will cause nonzero return value)
|
||||||
|
@ -1,2 +1,11 @@
|
|||||||
GM: drawing... selftest1 [300 200]
|
GM: drawing... selftest1 [300 200]
|
||||||
GM: Ran 1 tests: 1 passed, 0 failed, 1 missing reference images
|
GM: Ran 1 GMs
|
||||||
|
GM: ... over 2 configs ["8888", "565"]
|
||||||
|
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
|
||||||
|
GM: ... so there should be a total of 9 tests.
|
||||||
|
GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
|
||||||
|
GM: [*] 0 NoGpuContext:
|
||||||
|
GM: [*] 0 ImageMismatch:
|
||||||
|
GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
|
||||||
|
GM: [*] 0 WritingReferenceImage:
|
||||||
|
GM: (results marked with [*] will cause nonzero return value)
|
||||||
|
Loading…
Reference in New Issue
Block a user