cleanup: fix gm's use of ErrorBitfield to be consistent (it's a bitfield, not an enum)

Review URL: https://codereview.chromium.org/12640004

git-svn-id: http://skia.googlecode.com/svn/trunk@8028 2bbb7eff-a529-9590-31e7-b0007b416f81
Author: epoger@google.com
Date:   2013-03-07 20:28:37 +00:00
Parent: b8f0de176f
Commit: f7dba02e12
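As a minimal sketch of the intended usage (the harness below is illustrative, not gm code), the renamed constants are OR-ed together and tested with bitwise AND rather than switched on:

#include <cstdio>

// Same scheme as the new gm constants: each error gets its own bit,
// and an empty bitfield means "no errors".
typedef int ErrorBitfield;
const static ErrorBitfield kEmptyErrorBitfield                 = 0x00;
const static ErrorBitfield kNoGpuContext_ErrorBitmask          = 0x01;
const static ErrorBitfield kImageMismatch_ErrorBitmask         = 0x02;
const static ErrorBitfield kMissingExpectations_ErrorBitmask   = 0x04;
const static ErrorBitfield kWritingReferenceImage_ErrorBitmask = 0x08;
// errors we treat as ignorable when deciding pass/fail:
const static ErrorBitfield kIgnorable_ErrorBitmask = kMissingExpectations_ErrorBitmask;

int main() {
    // Multiple error types can be recorded for one comparison by OR-ing
    // masks together (something a switch on the final value cannot express):
    ErrorBitfield errors = kEmptyErrorBitfield;
    errors |= kMissingExpectations_ErrorBitmask;
    errors |= kImageMismatch_ErrorBitmask;

    // Individual bits are tested with &, not equality:
    if (errors & kImageMismatch_ErrorBitmask) {
        printf("pixel mismatch recorded\n");
    }

    // A test passes only if every recorded error is ignorable -- the same
    // check this commit adds at the end of tool_main():
    if (errors == (errors & kIgnorable_ErrorBitmask)) {
        printf("PASS\n");
    } else {
        printf("FAIL\n");
    }
    return 0;
}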


@@ -80,19 +80,16 @@ extern bool gSkSuppressFontCachePurgeSpew;
 #define CAN_IMAGE_PDF 0
 #endif
-// TODO(epoger): We created this ErrorBitfield so that we could record
-// multiple error types for the same comparison. But in practice, we
-// process its final value in switch() statements, which inherently
-// assume that only one error type will be set.
-// I think we should probably change this to be an enum, and thus
-// constrain ourselves to a single error type per comparison.
 typedef int ErrorBitfield;
-const static ErrorBitfield ERROR_NONE = 0x00;
-const static ErrorBitfield ERROR_NO_GPU_CONTEXT = 0x01;
-const static ErrorBitfield ERROR_IMAGE_MISMATCH = 0x02;
-// const static ErrorBitfield ERROR_DIMENSION_MISMATCH = 0x04; DEPRECATED in https://codereview.appspot.com/7064047
-const static ErrorBitfield ERROR_READING_REFERENCE_IMAGE = 0x08;
-const static ErrorBitfield ERROR_WRITING_REFERENCE_IMAGE = 0x10;
+// an empty bitfield means no errors:
+const static ErrorBitfield kEmptyErrorBitfield = 0x00;
+// individual error types:
+const static ErrorBitfield kNoGpuContext_ErrorBitmask = 0x01;
+const static ErrorBitfield kImageMismatch_ErrorBitmask = 0x02;
+const static ErrorBitfield kMissingExpectations_ErrorBitmask = 0x04;
+const static ErrorBitfield kWritingReferenceImage_ErrorBitmask = 0x08;
+// we typically ignore any errors matching this bitmask:
+const static ErrorBitfield kIgnorable_ErrorBitmask = kMissingExpectations_ErrorBitmask;
 using namespace skiagm;
@@ -254,23 +251,19 @@ public:
     // of this type.
     void RecordError(ErrorBitfield errorType, const SkString& name,
                      const char renderModeDescriptor []) {
-        bool isPixelError;
-        switch (errorType) {
-        case ERROR_NONE:
+        // The common case: no error means nothing to record.
+        if (kEmptyErrorBitfield == errorType) {
             return;
-        case ERROR_READING_REFERENCE_IMAGE:
+        }
+        // If only certain error type(s) were reported, we know we can ignore them.
+        if (errorType == (errorType & kIgnorable_ErrorBitmask)) {
             return;
-        case ERROR_IMAGE_MISMATCH:
-            isPixelError = true;
-            break;
-        default:
-            isPixelError = false;
-            break;
         }
         FailRec& rec = fFailedTests.push_back(make_name(
             name.c_str(), renderModeDescriptor));
-        rec.fIsPixelError = isPixelError;
+        rec.fIsPixelError = (errorType & kImageMismatch_ErrorBitmask);
     }
     // List contents of fFailedTests via SkDebug.
@@ -410,7 +403,7 @@ public:
 #if SK_SUPPORT_GPU
         else { // GPU
             if (NULL == context) {
-                return ERROR_NO_GPU_CONTEXT;
+                return kNoGpuContext_ErrorBitmask;
             }
             SkAutoTUnref<SkDevice> device(new SkGpuDevice(context, rt));
             if (deferred) {
@@ -428,7 +421,7 @@ public:
         }
 #endif
         complete_bitmap(bitmap);
-        return ERROR_NONE;
+        return kEmptyErrorBitfield;
     }
     static void generate_image_from_picture(GM* gm, const ConfigData& gRec,
@@ -522,12 +515,12 @@ public:
             success = write_document(path, *document);
         }
         if (success) {
-            return ERROR_NONE;
+            return kEmptyErrorBitfield;
         } else {
             fprintf(stderr, "FAILED to write %s\n", path.c_str());
-            RecordError(ERROR_WRITING_REFERENCE_IMAGE, name,
+            RecordError(kWritingReferenceImage_ErrorBitmask, name,
                         renderModeDescriptor);
-            return ERROR_WRITING_REFERENCE_IMAGE;
+            return kWritingReferenceImage_ErrorBitmask;
         }
     }
@@ -587,8 +580,9 @@ public:
     }
     /**
-     * Compares actual checksum to expectations.
-     * Returns ERROR_NONE if they match, or some particular error code otherwise
+     * Compares actual checksum to expectations. Returns
+     * kEmptyErrorBitfield if they match, or some combination of
+     * _ErrorBitmask values otherwise.
      *
      * If fMismatchPath has been set, and there are pixel diffs, then the
     * actual bitmap will be written out to a file within fMismatchPath.
@@ -617,11 +611,11 @@ public:
         const char* completeName = completeNameString.c_str();
         if (expectations.empty()) {
-            retval = ERROR_READING_REFERENCE_IMAGE;
+            retval = kMissingExpectations_ErrorBitmask;
         } else if (expectations.match(actualChecksum)) {
-            retval = ERROR_NONE;
+            retval = kEmptyErrorBitfield;
         } else {
-            retval = ERROR_IMAGE_MISMATCH;
+            retval = kImageMismatch_ErrorBitmask;
             // Write out the "actuals" for any mismatches, if we have
             // been directed to do so.
@@ -662,7 +656,7 @@ public:
         Json::Value actualResults;
         actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] =
             asJsonValue(actualChecksum);
-        if (ERROR_NONE == result) {
+        if (kEmptyErrorBitfield == result) {
             this->fJsonActualResults_Succeeded[testName] = actualResults;
         } else {
             if (ignoreFailure) {
@@ -670,13 +664,12 @@ public:
                 // actual results against expectations in a JSON file
                 // (where we can set ignore-failure to either true or
                 // false), add test cases that exercise ignored
-                // failures (both for ERROR_READING_REFERENCE_IMAGE
-                // and ERROR_IMAGE_MISMATCH).
+                // failures (both for kMissingExpectations_ErrorBitmask
+                // and kImageMismatch_ErrorBitmask).
                 this->fJsonActualResults_FailureIgnored[testName] =
                     actualResults;
             } else {
-                switch(result) {
-                case ERROR_READING_REFERENCE_IMAGE:
+                if (result & kMissingExpectations_ErrorBitmask) {
                     // TODO: What about the case where there IS an
                     // expected image checksum, but that gm test
                     // doesn't actually run? For now, those cases
@@ -690,15 +683,9 @@ public:
                     // is given but the test is never run).
                     this->fJsonActualResults_NoComparison[testName] =
                         actualResults;
-                    break;
-                case ERROR_IMAGE_MISMATCH:
+                }
+                if (result & kImageMismatch_ErrorBitmask) {
                     this->fJsonActualResults_Failed[testName] = actualResults;
-                    break;
-                default:
-                    fprintf(stderr, "encountered unexpected result %d\n",
-                            result);
-                    SkDEBUGFAIL("encountered unexpected result");
-                    break;
                 }
             }
         }
@@ -735,7 +722,7 @@ public:
                                        SkBitmap& actualBitmap, SkDynamicMemoryWStream* pdf) {
         SkString name = make_name(gm->shortName(), gRec.fName);
-        ErrorBitfield retval = ERROR_NONE;
+        ErrorBitfield retval = kEmptyErrorBitfield;
         ExpectationsSource *expectationsSource =
             this->fExpectationsSource.get();
@@ -762,7 +749,7 @@ public:
             Checksum actualChecksum =
                 SkBitmapChecksummer::Compute64(actualBitmap);
             add_actual_results_to_json_summary(name.c_str(), actualChecksum,
-                                               ERROR_READING_REFERENCE_IMAGE,
+                                               kMissingExpectations_ErrorBitmask,
                                                false);
         }
@@ -862,7 +849,7 @@ public:
         // Early exit if we can't generate the image.
         ErrorBitfield errors = generate_image(gm, gRec, context, rt, bitmap,
                                               false);
-        if (ERROR_NONE != errors) {
+        if (kEmptyErrorBitfield != errors) {
             // TODO: Add a test to exercise what the stdout and
             // JSON look like if we get an "early error" while
             // trying to generate the image.
@@ -895,18 +882,18 @@ public:
             // Early exit if we can't generate the image, but this is
             // expected in some cases, so don't report a test failure.
             if (!generate_image(gm, gRec, context, rt, &bitmap, true)) {
-                return ERROR_NONE;
+                return kEmptyErrorBitfield;
             }
             return compare_test_results_to_reference_bitmap(
                 gm, gRec, "-deferred", bitmap, &referenceBitmap);
         }
-        return ERROR_NONE;
+        return kEmptyErrorBitfield;
     }
     ErrorBitfield test_pipe_playback(GM* gm,
                                      const ConfigData& gRec,
                                      const SkBitmap& referenceBitmap) {
-        ErrorBitfield errors = ERROR_NONE;
+        ErrorBitfield errors = kEmptyErrorBitfield;
         for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
             SkBitmap bitmap;
             SkISize size = gm->getISize();
@@ -923,7 +910,7 @@ public:
             string.append(gPipeWritingFlagCombos[i].name);
             errors |= compare_test_results_to_reference_bitmap(
                 gm, gRec, string.c_str(), bitmap, &referenceBitmap);
-            if (errors != ERROR_NONE) {
+            if (errors != kEmptyErrorBitfield) {
                 break;
             }
         }
@@ -932,7 +919,7 @@ public:
     ErrorBitfield test_tiled_pipe_playback(
         GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap) {
-        ErrorBitfield errors = ERROR_NONE;
+        ErrorBitfield errors = kEmptyErrorBitfield;
         for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
             SkBitmap bitmap;
             SkISize size = gm->getISize();
@@ -949,7 +936,7 @@ public:
             string.append(gPipeWritingFlagCombos[i].name);
             errors |= compare_test_results_to_reference_bitmap(
                 gm, gRec, string.c_str(), bitmap, &referenceBitmap);
-            if (errors != ERROR_NONE) {
+            if (errors != kEmptyErrorBitfield) {
                 break;
             }
         }
@@ -1464,7 +1451,7 @@ int tool_main(int argc, char** argv) {
         SkDebugf("%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName,
                  size.width(), size.height());
-        ErrorBitfield testErrors = ERROR_NONE;
+        ErrorBitfield testErrors = kEmptyErrorBitfield;
         uint32_t gmFlags = gm->getFlags();
         for (int i = 0; i < configs.count(); i++) {
@@ -1484,12 +1471,12 @@ int tool_main(int argc, char** argv) {
             // Now we know that we want to run this test and record its
             // success or failure.
-            ErrorBitfield renderErrors = ERROR_NONE;
+            ErrorBitfield renderErrors = kEmptyErrorBitfield;
             GrRenderTarget* renderTarget = NULL;
 #if SK_SUPPORT_GPU
             SkAutoTUnref<GrRenderTarget> rt;
             AutoResetGr autogr;
-            if ((ERROR_NONE == renderErrors) &&
+            if ((kEmptyErrorBitfield == renderErrors) &&
                 kGPU_Backend == config.fBackend) {
                 GrContext* gr = grFactory->get(config.fGLContextType);
                 bool grSuccess = false;
@@ -1512,14 +1499,14 @@ int tool_main(int argc, char** argv) {
                 }
             }
             if (!grSuccess) {
-                renderErrors |= ERROR_NO_GPU_CONTEXT;
+                renderErrors |= kNoGpuContext_ErrorBitmask;
             }
         }
 #endif
             SkBitmap comparisonBitmap;
-            if (ERROR_NONE == renderErrors) {
+            if (kEmptyErrorBitfield == renderErrors) {
                 renderErrors |= gmmain.test_drawing(gm, config, writePath,
                                                     GetGr(),
                                                     renderTarget,
@@ -1546,13 +1533,13 @@ int tool_main(int argc, char** argv) {
         // run the picture centric GM steps
         if (!(gmFlags & GM::kSkipPicture_Flag)) {
-            ErrorBitfield pictErrors = ERROR_NONE;
+            ErrorBitfield pictErrors = kEmptyErrorBitfield;
             //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm));
             SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0);
             SkAutoUnref aur(pict);
-            if ((ERROR_NONE == testErrors) && doReplay) {
+            if ((kEmptyErrorBitfield == testErrors) && doReplay) {
                 SkBitmap bitmap;
                 gmmain.generate_image_from_picture(gm, compareConfig, pict,
                                                    &bitmap);
@@ -1560,8 +1547,8 @@ int tool_main(int argc, char** argv) {
                     gm, compareConfig, "-replay", bitmap, &comparisonBitmap);
             }
-            if ((ERROR_NONE == testErrors) &&
-                (ERROR_NONE == pictErrors) &&
+            if ((kEmptyErrorBitfield == testErrors) &&
+                (kEmptyErrorBitfield == pictErrors) &&
                 doSerialize) {
                 SkPicture* repict = gmmain.stream_to_new_picture(*pict);
                 SkAutoUnref aurr(repict);
@@ -1629,15 +1616,15 @@ int tool_main(int argc, char** argv) {
         // run the pipe centric GM steps
         if (!(gmFlags & GM::kSkipPipe_Flag)) {
-            ErrorBitfield pipeErrors = ERROR_NONE;
+            ErrorBitfield pipeErrors = kEmptyErrorBitfield;
-            if ((ERROR_NONE == testErrors) && doPipe) {
+            if ((kEmptyErrorBitfield == testErrors) && doPipe) {
                 pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig,
                                                         comparisonBitmap);
             }
-            if ((ERROR_NONE == testErrors) &&
-                (ERROR_NONE == pipeErrors) &&
+            if ((kEmptyErrorBitfield == testErrors) &&
+                (kEmptyErrorBitfield == pipeErrors) &&
                 doTiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
                 pipeErrors |= gmmain.test_tiled_pipe_playback(gm, compareConfig,
                                                               comparisonBitmap);
@@ -1652,10 +1639,10 @@ int tool_main(int argc, char** argv) {
         // want to also tabulate other error types, we can do so.
         testsRun++;
         if (!gmmain.fExpectationsSource.get() ||
-            (ERROR_READING_REFERENCE_IMAGE & testErrors)) {
+            (kMissingExpectations_ErrorBitmask & testErrors)) {
             testsMissingReferenceImages++;
         }
-        if (ERROR_NONE == testErrors || ERROR_READING_REFERENCE_IMAGE == testErrors) {
+        if (testErrors == (testErrors & kIgnorable_ErrorBitmask)) {
             testsPassed++;
         } else {
             testsFailed++;