re-land r7258 with fixes and tests
BUG=http://code.google.com/p/skia/issues/detail?id=1079
TBR=reed
Review URL: https://codereview.appspot.com/7132060

git-svn-id: http://skia.googlecode.com/svn/trunk@7291 2bbb7eff-a529-9590-31e7-b0007b416f81
parent 514b922236
commit 3726960733
174 gm/gm_expectations.h Normal file
@@ -0,0 +1,174 @@
/*
 * Copyright 2013 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#ifndef gm_expectations_DEFINED
#define gm_expectations_DEFINED

#include "gm.h"
#include "SkBitmapChecksummer.h"
#include "SkImageDecoder.h"
#include "SkOSFile.h"
#include "SkRefCnt.h"
#include "SkTArray.h"

#ifdef SK_BUILD_FOR_WIN
    // json includes xlocale which generates warning 4530 because we're compiling without
    // exceptions; see https://code.google.com/p/skia/issues/detail?id=1067
    #pragma warning(push)
    #pragma warning(disable : 4530)
#endif
#include "json/value.h"
#ifdef SK_BUILD_FOR_WIN
    #pragma warning(pop)
#endif

namespace skiagm {

    // The actual type we use to represent a checksum is hidden in here.
    typedef Json::UInt64 Checksum;
    static inline Json::Value asJsonValue(Checksum checksum) {
        return checksum;
    }

    static SkString make_filename(const char path[],
                                  const char renderModeDescriptor[],
                                  const char *name,
                                  const char suffix[]) {
        SkString filename(path);
        if (filename.endsWith(SkPATH_SEPARATOR)) {
            filename.remove(filename.size() - 1, 1);
        }
        filename.appendf("%c%s%s.%s", SkPATH_SEPARATOR,
                         name, renderModeDescriptor, suffix);
        return filename;
    }
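    // Usage sketch (illustration only, not part of this commit): a call like
    //     make_filename("gm/tests/images", "-rtree", "aaclip", "png")
    // yields "gm/tests/images/aaclip-rtree.png" (with the platform's
    // SkPATH_SEPARATOR in place of '/'), trimming any trailing separator
    // from the path argument first.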

    /**
     * Test expectations (allowed image checksums, etc.)
     */
    class Expectations {
    public:
        /**
         * No expectations at all.
         *
         * We set ignoreFailure to false by default, but it doesn't really
         * matter... the result will always be "no-comparison" anyway.
         */
        Expectations(bool ignoreFailure=false) {
            fIgnoreFailure = ignoreFailure;
        }

        /**
         * Allow exactly one checksum (appropriate for the case when we
         * are comparing against a single PNG file).
         *
         * By default, DO NOT ignore failures.
         */
        Expectations(Checksum singleChecksum, bool ignoreFailure=false) {
            fIgnoreFailure = ignoreFailure;
            fAllowedChecksums.push_back() = singleChecksum;
        }

        /**
         * Returns true iff we want to ignore failed expectations.
         */
        bool ignoreFailure() const { return this->fIgnoreFailure; }

        /**
         * Returns true iff there are no allowed checksums.
         */
        bool empty() const { return this->fAllowedChecksums.empty(); }

        /**
         * Returns true iff actualChecksum matches any allowedChecksum,
         * regardless of fIgnoreFailure.  (The caller can check
         * that separately.)
         */
        bool match(Checksum actualChecksum) const {
            for (int i=0; i < this->fAllowedChecksums.count(); i++) {
                Checksum allowedChecksum = this->fAllowedChecksums[i];
                if (allowedChecksum == actualChecksum) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Return a JSON representation of the allowed checksums.
         * This does NOT include any information about whether to
         * ignore failures.
         */
        Json::Value allowedChecksumsAsJson() const {
            Json::Value allowedChecksumArray;
            if (!this->fAllowedChecksums.empty()) {
                for (int i=0; i < this->fAllowedChecksums.count(); i++) {
                    Checksum allowedChecksum = this->fAllowedChecksums[i];
                    allowedChecksumArray.append(asJsonValue(allowedChecksum));
                }
            }
            return allowedChecksumArray;
        }

    private:
        SkTArray<Checksum> fAllowedChecksums;
        bool fIgnoreFailure;
    };
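    // Usage sketch (illustration only, not part of this commit): a caller
    // that has computed actualChecksum for a test's output might do:
    //
    //     Expectations expectations(expectedChecksum);  // allow one checksum
    //     if (!expectations.match(actualChecksum) &&
    //         !expectations.ignoreFailure()) {
    //         // ...record this test as failed...
    //     }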

    /**
     * Abstract source of Expectations objects for individual tests.
     */
    class ExpectationsSource : public SkRefCnt {
    public:
        virtual Expectations get(const char *testName) = 0;
    };

    /**
     * Return Expectations based on individual image files on disk.
     */
    class IndividualImageExpectationsSource : public ExpectationsSource {
    public:
        /**
         * Create an ExpectationsSource that will return Expectations based on
         * image files found within rootDir.
         *
         * rootDir: directory under which to look for image files
         *          (this string will be copied to storage within this object)
         * notifyOfMissingFiles: whether to log a message to stderr if an image
         *                       file cannot be found
         */
        IndividualImageExpectationsSource(const char *rootDir,
                                          bool notifyOfMissingFiles) :
            fRootDir(rootDir), fNotifyOfMissingFiles(notifyOfMissingFiles) {}

        Expectations get(const char *testName) SK_OVERRIDE {
            SkString path = make_filename(fRootDir.c_str(), "", testName,
                                          "png");
            SkBitmap referenceBitmap;
            bool decodedReferenceBitmap =
                SkImageDecoder::DecodeFile(path.c_str(), &referenceBitmap,
                                           SkBitmap::kARGB_8888_Config,
                                           SkImageDecoder::kDecodePixels_Mode,
                                           NULL);
            if (decodedReferenceBitmap) {
                Checksum checksum = SkBitmapChecksummer::Compute64(
                    referenceBitmap);
                return Expectations(checksum);
            } else {
                if (fNotifyOfMissingFiles) {
                    fprintf(stderr, "FAILED to read %s\n", path.c_str());
                }
                return Expectations();
            }
        }

    private:
        const SkString fRootDir;
        const bool fNotifyOfMissingFiles;
    };
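    // Usage sketch (illustration only, not part of this commit): callers hold
    // sources through the abstract interface, so another backend (e.g. the
    // JSON-summary reader mentioned as future work elsewhere in this commit)
    // could slot in later:
    //
    //     SkAutoTUnref<ExpectationsSource> source(SkNEW_ARGS(
    //         IndividualImageExpectationsSource, ("gm/tests/images", true)));
    //     Expectations expectations = source->get("aaclip_8888");
    //     if (expectations.empty()) { /* no reference image was found */ }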

}
#endif
524 gm/gmmain.cpp
@@ -14,6 +14,7 @@
 */

#include "gm.h"
#include "gm_expectations.h"
#include "system_preferences.h"
#include "SkBitmapChecksummer.h"
#include "SkColorPriv.h"
@@ -35,7 +36,7 @@

#ifdef SK_BUILD_FOR_WIN
// json includes xlocale which generates warning 4530 because we're compiling without
// exceptions
// exceptions; see https://code.google.com/p/skia/issues/detail?id=1067
#pragma warning(push)
#pragma warning(disable : 4530)
#endif
@@ -199,7 +200,6 @@ public:
    GMMain() {
        // Set default values of member variables, which tool_main()
        // may override.
        fNotifyMissingReadReference = true;
        fUseFileHierarchy = false;
        fMismatchPath = NULL;
    }
@@ -216,17 +216,66 @@ public:
        return name;
    }

    static SkString make_filename(const char path[],
                                  const char renderModeDescriptor[],
                                  const SkString& name,
                                  const char suffix[]) {
        SkString filename(path);
        if (filename.endsWith(SkPATH_SEPARATOR)) {
            filename.remove(filename.size() - 1, 1);
        }
        filename.appendf("%c%s%s.%s", SkPATH_SEPARATOR,
                         name.c_str(), renderModeDescriptor, suffix);
        return filename;
    /**
     * All calls to get bitmap checksums must go through this
     * wrapper, so that we can call force_all_opaque() first.
     * THIS WILL MODIFY THE BITMAP IN-PLACE!
     *
     * It's too bad that we are throwing away alpha channel data
     * we could otherwise be examining, but this had always been happening
     * before... it was buried within the compare() method at
     * https://code.google.com/p/skia/source/browse/trunk/gm/gmmain.cpp?r=7289#305 .
     *
     * Apparently we need this, at least for bitmaps that are either:
     * (a) destined to be written out as PNG files, or
     * (b) compared against bitmaps read in from PNG files
     * for the reasons described just above the force_all_opaque() method.
     *
     * Neglecting to do this led to the difficult-to-diagnose
     * http://code.google.com/p/skia/issues/detail?id=1079 ('gm generating
     * spurious pixel_error messages as of r7258')
     *
     * TODO(epoger): Come up with a better solution that allows us to
     * compare full pixel data, including alpha channel, while still being
     * robust in the face of transformations to/from PNG files.
     * Options include:
     *
     * 1. Continue to call force_all_opaque(), but ONLY for bitmaps that
     *    will be written to, or compared against, PNG files.
     *    PRO: Preserve/compare alpha channel info for the non-PNG cases
     *         (comparing different renderModes in-memory)
     *    CON: The bitmaps (and checksums) for these non-PNG cases would be
     *         different than those for the PNG-compared cases, and in the
     *         case of a failed renderMode comparison, how would we write the
     *         image to disk for examination?
     *
     * 2. Always compute image checksums from PNG format (either
     *    directly from the bytes of a PNG file, or capturing the
     *    bytes we would have written to disk if we were writing the
     *    bitmap out as a PNG).
     *    PRO: I think this would allow us to never force opaque, and to
     *         the extent that alpha channel data can be preserved in a PNG
     *         file, we could observe it.
     *    CON: If we read a bitmap from disk, we need to take its checksum
     *         from the source PNG (we can't compute it from the bitmap we
     *         read out of the PNG, because we will have already premultiplied
     *         the alpha).
     *    CON: Seems wasteful to convert a bitmap to PNG format just to take
     *         its checksum. (Although we're wasting lots of effort already
     *         calling force_all_opaque().)
     *
     * 3. Make the alpha premultiply/unpremultiply routines 100% consistent,
     *    so we can transform images back and forth without fear of off-by-one
     *    errors.
     *    CON: Math is hard.
     *
     * 4. Perform a "close enough" comparison of bitmaps (+/- 1 bit in each
     *    channel), rather than demanding absolute equality.
     *    CON: Can't do this with checksums.
     */
    static Checksum get_checksum(const SkBitmap& bitmap) {
        force_all_opaque(bitmap);
        return SkBitmapChecksummer::Compute64(bitmap);
    }
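    // Sketch of why the wrapper matters (illustration only, not part of this
    // commit): two bitmaps that differ only in their alpha channels checksum
    // identically once forced opaque, so checksums stay stable across a PNG
    // write/read cycle that would otherwise perturb alpha:
    //
    //     SkBitmap a, b;   // hypothetical bitmaps differing only in alpha
    //     SkASSERT(get_checksum(a) == get_checksum(b));
    //
    // Comparing raw SkBitmapChecksummer::Compute64() values instead could
    // produce the spurious mismatches described in issue 1079 above.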

    /* since PNG insists on unpremultiplying our alpha, we take no
@@ -284,51 +333,6 @@ public:
        }
    }

    // Compares "target" and "base" bitmaps, returning the result
    // (ERROR_NONE if the two bitmaps are identical).
    ErrorBitfield compare(const SkBitmap& target, const SkBitmap& base,
                          const SkString& name,
                          const char* renderModeDescriptor) {
        SkBitmap copy;
        const SkBitmap* bm = &target;
        if (target.config() != SkBitmap::kARGB_8888_Config) {
            target.copyTo(&copy, SkBitmap::kARGB_8888_Config);
            bm = &copy;
        }
        SkBitmap baseCopy;
        const SkBitmap* bp = &base;
        if (base.config() != SkBitmap::kARGB_8888_Config) {
            base.copyTo(&baseCopy, SkBitmap::kARGB_8888_Config);
            bp = &baseCopy;
        }

        force_all_opaque(*bm);
        force_all_opaque(*bp);

        const int w = bm->width();
        const int h = bm->height();
        if (w != bp->width() || h != bp->height()) {
            RecordError(ERROR_IMAGE_MISMATCH, name, renderModeDescriptor);
            return ERROR_IMAGE_MISMATCH;
        }

        SkAutoLockPixels bmLock(*bm);
        SkAutoLockPixels baseLock(*bp);

        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++) {
                SkPMColor c0 = *bp->getAddr32(x, y);
                SkPMColor c1 = *bm->getAddr32(x, y);
                if (c0 != c1) {
                    RecordError(ERROR_IMAGE_MISMATCH, name,
                                renderModeDescriptor);
                    return ERROR_IMAGE_MISMATCH;
                }
            }
        }
        return ERROR_NONE;
    }

    static bool write_document(const SkString& path,
                               const SkDynamicMemoryWStream& document) {
        SkFILEWStream stream(path.c_str());
@@ -485,15 +489,18 @@ public:
            gRec.fBackend == kGPU_Backend ||
            (gRec.fBackend == kPDF_Backend && CAN_IMAGE_PDF)) {

            path = make_filename(writePath, renderModeDescriptor, name, "png");
            path = make_filename(writePath, renderModeDescriptor, name.c_str(),
                                 "png");
            success = write_bitmap(path, bitmap);
        }
        if (kPDF_Backend == gRec.fBackend) {
            path = make_filename(writePath, renderModeDescriptor, name, "pdf");
            path = make_filename(writePath, renderModeDescriptor, name.c_str(),
                                 "pdf");
            success = write_document(path, *document);
        }
        if (kXPS_Backend == gRec.fBackend) {
            path = make_filename(writePath, renderModeDescriptor, name, "xps");
            path = make_filename(writePath, renderModeDescriptor, name.c_str(),
                                 "xps");
            success = write_document(path, *document);
        }
        if (success) {
@@ -506,117 +513,208 @@ public:
        }
    }

    // Compares bitmap "bitmap" to a reference bitmap read from disk.
    //
    // Returns a description of the difference between "bitmap" and
    // the reference bitmap, or ERROR_READING_REFERENCE_IMAGE if
    // unable to read the reference bitmap from disk.
    ErrorBitfield compare_to_reference_image_on_disk(
        const char readPath [], const SkString& name, SkBitmap &bitmap,
        const char renderModeDescriptor []) {
    /**
     * Compares actual checksum to expectations.
     * Returns ERROR_NONE if they match, or some particular error code otherwise
     *
     * If fMismatchPath has been set, and there are pixel diffs, then the
     * actual bitmap will be written out to a file within fMismatchPath.
     *
     * @param expectations what expectations to compare actualBitmap against
     * @param actualBitmap the image we actually generated
     * @param baseNameString name of test without renderModeDescriptor added
     * @param renderModeDescriptor e.g., "-rtree", "-deferred"
     * @param addToJsonSummary whether to add these results (both actual and
     *        expected) to the JSON summary
     *
     * TODO: For now, addToJsonSummary is only set to true within
     * compare_test_results_to_stored_expectations(), so results of our
     * in-memory comparisons (Rtree vs regular, etc.) are not written to the
     * JSON summary. We may wish to change that.
     */
    ErrorBitfield compare_to_expectations(Expectations expectations,
                                          const SkBitmap& actualBitmap,
                                          const SkString& baseNameString,
                                          const char renderModeDescriptor[],
                                          bool addToJsonSummary=false) {
        ErrorBitfield retval;
        SkString path = make_filename(readPath, "", name, "png");
        SkBitmap referenceBitmap;
        Json::Value expectedChecksumsArray;
        Checksum actualChecksum = get_checksum(actualBitmap);
        SkString completeNameString = baseNameString;
        completeNameString.append(renderModeDescriptor);
        const char* completeName = completeNameString.c_str();

        bool decodedReferenceBitmap =
            SkImageDecoder::DecodeFile(path.c_str(), &referenceBitmap,
                                       SkBitmap::kARGB_8888_Config,
                                       SkImageDecoder::kDecodePixels_Mode,
                                       NULL);
        if (decodedReferenceBitmap) {
            expectedChecksumsArray.append(Json::UInt64(
                SkBitmapChecksummer::Compute64(referenceBitmap)));
            retval = compare(bitmap, referenceBitmap, name,
                             renderModeDescriptor);
            if (fMismatchPath && (retval & ERROR_IMAGE_MISMATCH)) {
                SkString path = make_filename(fMismatchPath, renderModeDescriptor, name, "png");
                write_bitmap(path, bitmap);
            }
        } else {
            if (fNotifyMissingReadReference) {
                fprintf(stderr, "FAILED to read %s\n", path.c_str());
            }
            RecordError(ERROR_READING_REFERENCE_IMAGE, name,
                        renderModeDescriptor);
        if (expectations.empty()) {
            retval = ERROR_READING_REFERENCE_IMAGE;
        }

        // Add this result to the appropriate JSON collection of actual results,
        // depending on status.
        Json::Value actualResults;
        actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] = Json::UInt64(
            SkBitmapChecksummer::Compute64(bitmap));
        if (decodedReferenceBitmap) {
            if (ERROR_NONE == retval) {
                fJsonActualResults_Succeeded[name.c_str()] = actualResults;
            } else {
                fJsonActualResults_Failed[name.c_str()] = actualResults;
            }
        } else if (expectations.match(actualChecksum)) {
            retval = ERROR_NONE;
        } else {
            fJsonActualResults_NoComparison[name.c_str()] = actualResults;
            retval = ERROR_IMAGE_MISMATCH;
            if (fMismatchPath) {
                SkString path =
                    make_filename(fMismatchPath, renderModeDescriptor,
                                  baseNameString.c_str(), "png");
                write_bitmap(path, actualBitmap);
            }
        }
        RecordError(retval, baseNameString, renderModeDescriptor);

        // Add this test to the JSON collection of expected results.
        // For now, we assume that this collection starts out empty and we
        // just fill it in as we go; once gm accepts a JSON file as input,
        // we'll have to change that.
        Json::Value expectedResults;
        expectedResults[kJsonKey_ExpectedResults_Checksums] = expectedChecksumsArray;
        expectedResults[kJsonKey_ExpectedResults_IgnoreFailure] = !decodedReferenceBitmap;
        fJsonExpectedResults[name.c_str()] = expectedResults;
        if (addToJsonSummary) {
            add_actual_results_to_json_summary(completeName, actualChecksum,
                                               retval,
                                               expectations.ignoreFailure());
            add_expected_results_to_json_summary(completeName, expectations);
        }

        return retval;
    }

    // NOTE: As far as I can tell, this function is NEVER called with a
    // non-blank renderModeDescriptor, EXCEPT when readPath and writePath are
    // both NULL (and thus no images are read from or written to disk).
    // So I don't trust that the renderModeDescriptor is being used for
    // anything other than debug output these days.
    ErrorBitfield handle_test_results(GM* gm,
                                      const ConfigData& gRec,
                                      const char writePath [],
                                      const char readPath [],
                                      const char renderModeDescriptor [],
                                      SkBitmap& bitmap,
                                      SkDynamicMemoryWStream* pdf,
                                      const SkBitmap* referenceBitmap) {
    /**
     * Add this result to the appropriate JSON collection of actual results,
     * depending on status.
     */
    void add_actual_results_to_json_summary(const char testName[],
                                            Checksum actualChecksum,
                                            ErrorBitfield result,
                                            bool ignoreFailure) {
        Json::Value actualResults;
        actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] =
            asJsonValue(actualChecksum);
        if (ERROR_NONE == result) {
            this->fJsonActualResults_Succeeded[testName] = actualResults;
        } else {
            if (ignoreFailure) {
                // TODO: Once we have added the ability to compare
                // actual results against expectations in a JSON file
                // (where we can set ignore-failure to either true or
                // false), add test cases that exercise ignored
                // failures (both for ERROR_READING_REFERENCE_IMAGE
                // and ERROR_IMAGE_MISMATCH).
                this->fJsonActualResults_FailureIgnored[testName] =
                    actualResults;
            } else {
                switch(result) {
                    case ERROR_READING_REFERENCE_IMAGE:
                        // TODO: What about the case where there IS an
                        // expected image checksum, but that gm test
                        // doesn't actually run?  For now, those cases
                        // will always be ignored, because gm only looks
                        // at expectations that correspond to gm tests
                        // that were actually run.
                        //
                        // Once we have the ability to express
                        // expectations as a JSON file, we should fix this
                        // (and add a test case for which an expectation
                        // is given but the test is never run).
                        this->fJsonActualResults_NoComparison[testName] =
                            actualResults;
                        break;
                    case ERROR_IMAGE_MISMATCH:
                        this->fJsonActualResults_Failed[testName] = actualResults;
                        break;
                    default:
                        fprintf(stderr, "encountered unexpected result %d\n",
                                result);
                        SkDEBUGFAIL("encountered unexpected result");
                        break;
                }
            }
        }
    }
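    // Shape of the resulting "actual-results" summary (sketch; cf. the
    // json-summary.txt files added under gm/tests/outputs/ below, where
    // FAKE stands in for a real checksum):
    //
    //     "actual-results" : {
    //        "succeeded" : { "aaclip_8888" : { "checksum" : FAKE } },
    //        "failed" : null,
    //        "failure-ignored" : null,
    //        "no-comparison" : null
    //     }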

    /**
     * Add this test to the JSON collection of expected results.
     */
    void add_expected_results_to_json_summary(const char testName[],
                                              Expectations expectations) {
        // For now, we assume that this collection starts out empty and we
        // just fill it in as we go; once gm accepts a JSON file as input,
        // we'll have to change that.
        Json::Value expectedResults;
        expectedResults[kJsonKey_ExpectedResults_Checksums] =
            expectations.allowedChecksumsAsJson();
        expectedResults[kJsonKey_ExpectedResults_IgnoreFailure] =
            expectations.ignoreFailure();
        this->fJsonExpectedResults[testName] = expectedResults;
    }
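    // Shape of the resulting "expected-results" summary (sketch; matches the
    // files added under gm/tests/outputs/ below):
    //
    //     "expected-results" : {
    //        "aaclip_8888" : {
    //           "checksums" : [ FAKE ],
    //           "ignore-failure" : false
    //        }
    //     }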

    /**
     * Compare actualBitmap to expectations stored in this->fExpectationsSource.
     *
     * @param gm which test generated the actualBitmap
     * @param gRec
     * @param writePath unless this is NULL, write out actual images into this
     *        directory
     * @param actualBitmap bitmap generated by this run
     * @param pdf
     */
    ErrorBitfield compare_test_results_to_stored_expectations(
        GM* gm, const ConfigData& gRec, const char writePath[],
        SkBitmap& actualBitmap, SkDynamicMemoryWStream* pdf) {

        SkString name = make_name(gm->shortName(), gRec.fName);
        ErrorBitfield retval = ERROR_NONE;

        if (readPath && (gRec.fFlags & kRead_ConfigFlag)) {
            retval |= compare_to_reference_image_on_disk(readPath, name, bitmap,
                                                         renderModeDescriptor);
        } else if (NULL == referenceBitmap) {
            // If we are running without "--readPath", we still want to
        ExpectationsSource *expectationsSource =
            this->fExpectationsSource.get();
        if (expectationsSource && (gRec.fFlags & kRead_ConfigFlag)) {
            /*
             * Get the expected results for this test, as one or more allowed
             * checksums. The current implementation of expectationsSource
             * gets this by computing the checksum of a single PNG file on disk.
             *
             * TODO(epoger): This relies on the fact that
             * force_all_opaque() was called on the bitmap before it
             * was written to disk as a PNG in the first place. If
             * not, the checksum returned here may not match the
             * checksum of actualBitmap, which *has* been run through
             * force_all_opaque().
             * See comments above get_checksum() for more detail.
             */
            Expectations expectations = expectationsSource->get(name.c_str());
            retval |= compare_to_expectations(expectations, actualBitmap,
                                              name, "", true);
        } else {
            // If we are running without expectations, we still want to
            // record the actual results.
            //
            // For now, though, we don't record results of comparisons against
            // different in-memory representations (hence the referenceBitmap
            // NULL check).
            Json::Value actualResults;
            actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] =
                Json::UInt64(SkBitmapChecksummer::Compute64(bitmap));
            fJsonActualResults_NoComparison[name.c_str()] = actualResults;
            Checksum actualChecksum = get_checksum(actualBitmap);
            add_actual_results_to_json_summary(name.c_str(), actualChecksum,
                                               ERROR_READING_REFERENCE_IMAGE,
                                               false);
        }

        // TODO: Consider moving this into compare_to_expectations(),
        // similar to fMismatchPath... for now, we don't do that, because
        // we don't want to write out the actual bitmaps for all
        // renderModes of all tests! That would be a lot of files.
        if (writePath && (gRec.fFlags & kWrite_ConfigFlag)) {
            retval |= write_reference_image(gRec, writePath,
                                            renderModeDescriptor,
                                            name, bitmap, pdf);
        }
        if (referenceBitmap) {
            ErrorBitfield compareResult = compare(bitmap, *referenceBitmap, name,
                                                  renderModeDescriptor);
            if (fMismatchPath && (compareResult & ERROR_IMAGE_MISMATCH)) {
                SkString path = make_filename(fMismatchPath, renderModeDescriptor, name, "png");
                write_bitmap(path, bitmap);
            }
            retval |= compareResult;
            retval |= write_reference_image(gRec, writePath, "",
                                            name, actualBitmap, pdf);
        }

        return retval;
    }

    /**
     * Compare actualBitmap to referenceBitmap.
     *
     * @param gm which test generated the bitmap
     * @param gRec
     * @param renderModeDescriptor
     * @param actualBitmap actual bitmap generated by this run
     * @param referenceBitmap bitmap we expected to be generated
     */
    ErrorBitfield compare_test_results_to_reference_bitmap(
        GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
        SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {

        SkASSERT(referenceBitmap);
        SkString name = make_name(gm->shortName(), gRec.fName);
        Checksum referenceChecksum = get_checksum(*referenceBitmap);
        Expectations expectations(referenceChecksum);
        return compare_to_expectations(expectations, actualBitmap,
                                       name, renderModeDescriptor);
    }

    static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
                                           SkScalar scale = SK_Scalar1) {
        // Pictures are refcounted so must be on heap
@@ -667,7 +765,6 @@ public:
    ErrorBitfield test_drawing(GM* gm,
                               const ConfigData& gRec,
                               const char writePath [],
                               const char readPath [],
                               GrContext* context,
                               GrRenderTarget* rt,
                               SkBitmap* bitmap) {
@@ -679,6 +776,9 @@ public:
            ErrorBitfield errors = generate_image(gm, gRec, context, rt, bitmap,
                                                  false);
            if (ERROR_NONE != errors) {
                // TODO: Add a test to exercise what the stdout and
                // JSON look like if we get an "early error" while
                // trying to generate the image.
                return errors;
            }
        } else if (gRec.fBackend == kPDF_Backend) {
@@ -691,8 +791,8 @@ public:
        } else if (gRec.fBackend == kXPS_Backend) {
            generate_xps(gm, document);
        }
        return handle_test_results(gm, gRec, writePath, readPath,
                                   "", *bitmap, &document, NULL);
        return compare_test_results_to_stored_expectations(
            gm, gRec, writePath, *bitmap, &document);
    }

    ErrorBitfield test_deferred_drawing(GM* gm,
@@ -710,17 +810,15 @@ public:
            if (!generate_image(gm, gRec, context, rt, &bitmap, true)) {
                return ERROR_NONE;
            }
            return handle_test_results(gm, gRec, NULL, NULL,
                                       "-deferred", bitmap, NULL,
                                       &referenceBitmap);
            return compare_test_results_to_reference_bitmap(
                gm, gRec, "-deferred", bitmap, &referenceBitmap);
        }
        return ERROR_NONE;
    }

    ErrorBitfield test_pipe_playback(GM* gm,
                                     const ConfigData& gRec,
                                     const SkBitmap& referenceBitmap,
                                     const char readPath []) {
                                     const SkBitmap& referenceBitmap) {
        ErrorBitfield errors = ERROR_NONE;
        for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
            SkBitmap bitmap;
@@ -735,9 +833,8 @@ public:
            writer.endRecording();
            SkString string("-pipe");
            string.append(gPipeWritingFlagCombos[i].name);
            errors |= handle_test_results(gm, gRec, NULL, NULL,
                                          string.c_str(), bitmap, NULL,
                                          &referenceBitmap);
            errors |= compare_test_results_to_reference_bitmap(
                gm, gRec, string.c_str(), bitmap, &referenceBitmap);
            if (errors != ERROR_NONE) {
                break;
            }
@@ -746,8 +843,7 @@ public:
    }

    ErrorBitfield test_tiled_pipe_playback(
        GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap,
        const char readPath []) {
        GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap) {
        ErrorBitfield errors = ERROR_NONE;
        for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
            SkBitmap bitmap;
@@ -762,9 +858,8 @@ public:
            writer.endRecording();
            SkString string("-tiled pipe");
            string.append(gPipeWritingFlagCombos[i].name);
            errors |= handle_test_results(gm, gRec, NULL, NULL,
                                          string.c_str(), bitmap, NULL,
                                          &referenceBitmap);
            errors |= compare_test_results_to_reference_bitmap(
                gm, gRec, string.c_str(), bitmap, &referenceBitmap);
            if (errors != ERROR_NONE) {
                break;
            }
@@ -777,9 +872,6 @@ public:
    // They are public for now, to allow easier setting by tool_main().
    //

    // if true, emit a message when we can't find a reference image to compare
    bool fNotifyMissingReadReference;

    bool fUseFileHierarchy;

    const char* fMismatchPath;
@@ -787,6 +879,11 @@ public:
    // information about all failed tests we have encountered so far
    SkTArray<FailRec> fFailedTests;

    // Where to read expectations (expected image checksums, etc.) from.
    // If unset, we don't do comparisons.
    SkAutoTUnref<ExpectationsSource> fExpectationsSource;

    // JSON summaries that we generate as we go (just for output).
    Json::Value fJsonExpectedResults;
    Json::Value fJsonActualResults_Failed;
    Json::Value fJsonActualResults_FailureIgnored;
@@ -978,6 +1075,9 @@ int tool_main(int argc, char** argv) {
    const char* readPath = NULL;    // if non-null, where we read from to compare
    const char* resourcePath = NULL;// if non-null, where we read from for image resources

    // if true, emit a message when we can't find a reference image to compare
    bool notifyMissingReadReference = true;

    SkTDArray<const char*> fMatches;

    bool doPDF = true;
@@ -1042,7 +1142,7 @@ int tool_main(int argc, char** argv) {
        } else if (strcmp(*argv, "--nodeferred") == 0) {
            doDeferred = false;
        } else if (strcmp(*argv, "--disable-missing-warning") == 0) {
            gmmain.fNotifyMissingReadReference = false;
            notifyMissingReadReference = false;
        } else if (strcmp(*argv, "--mismatchPath") == 0) {
            argv++;
            if (argv < stop && **argv) {
@@ -1071,7 +1171,7 @@ int tool_main(int argc, char** argv) {
                return -1;
            }
        } else if (strcmp(*argv, "--enable-missing-warning") == 0) {
            gmmain.fNotifyMissingReadReference = true;
            notifyMissingReadReference = true;
        } else if (strcmp(*argv, "--forceBWtext") == 0) {
            gForceBWtext = true;
        } else if (strcmp(*argv, "--help") == 0 || strcmp(*argv, "-h") == 0) {
@@ -1186,7 +1286,21 @@ int tool_main(int argc, char** argv) {
    GM::SetResourcePath(resourcePath);

    if (readPath) {
        fprintf(stderr, "reading from %s\n", readPath);
        if (!sk_exists(readPath)) {
            fprintf(stderr, "readPath %s does not exist!\n", readPath);
            return -1;
        }
        if (sk_isdir(readPath)) {
            fprintf(stderr, "reading from %s\n", readPath);
            gmmain.fExpectationsSource.reset(SkNEW_ARGS(
                IndividualImageExpectationsSource,
                (readPath, notifyMissingReadReference)));
        } else {
            fprintf(stderr, "reading expectations from JSON summary file %s ",
                    readPath);
            fprintf(stderr, "BUT WE DON'T KNOW HOW TO DO THIS YET!\n");
            return -1;
        }
    }
    if (writePath) {
        fprintf(stderr, "writing to %s\n", writePath);
@@ -1318,7 +1432,7 @@ int tool_main(int argc, char** argv) {

            if (ERROR_NONE == renderErrors) {
                renderErrors |= gmmain.test_drawing(gm, config, writePath,
                                                    readPath, GetGr(),
                                                    GetGr(),
                                                    renderTarget,
                                                    &comparisonBitmap);
            }
@@ -1353,11 +1467,8 @@ int tool_main(int argc, char** argv) {
                SkBitmap bitmap;
                gmmain.generate_image_from_picture(gm, compareConfig, pict,
                                                   &bitmap);
                pictErrors |= gmmain.handle_test_results(gm, compareConfig,
                                                         NULL, NULL,
                                                         "-replay", bitmap,
                                                         NULL,
                                                         &comparisonBitmap);
                pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
                    gm, compareConfig, "-replay", bitmap, &comparisonBitmap);
            }

            if ((ERROR_NONE == testErrors) &&
@@ -1369,18 +1480,15 @@ int tool_main(int argc, char** argv) {
                SkBitmap bitmap;
                gmmain.generate_image_from_picture(gm, compareConfig, repict,
                                                   &bitmap);
                pictErrors |= gmmain.handle_test_results(gm, compareConfig,
                                                         NULL, NULL,
                                                         "-serialize", bitmap,
                                                         NULL,
                                                         &comparisonBitmap);
                pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
                    gm, compareConfig, "-serialize", bitmap, &comparisonBitmap);
            }

            if (writePicturePath) {
                const char* pictureSuffix = "skp";
                SkString path = gmmain.make_filename(writePicturePath, "",
                                                     SkString(gm->shortName()),
                                                     pictureSuffix);
                SkString path = make_filename(writePicturePath, "",
                                              gm->shortName(),
                                              pictureSuffix);
                SkFILEWStream stream(path.c_str());
                pict->serialize(&stream);
            }
@@ -1388,18 +1496,19 @@ int tool_main(int argc, char** argv) {
            testErrors |= pictErrors;
        }

        // TODO: add a test in which the RTree rendering results in a
        // different bitmap than the standard rendering. It should
        // show up as failed in the JSON summary, and should be listed
        // in the stdout also.
        if (!(gmFlags & GM::kSkipPicture_Flag) && doRTree) {
            SkPicture* pict = gmmain.generate_new_picture(gm, kRTree_BbhType,
                SkPicture::kUsePathBoundsForClip_RecordingFlag);
            SkPicture* pict = gmmain.generate_new_picture(
                gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
            SkAutoUnref aur(pict);
            SkBitmap bitmap;
            gmmain.generate_image_from_picture(gm, compareConfig, pict,
                                               &bitmap);
            testErrors |= gmmain.handle_test_results(gm, compareConfig,
                                                     NULL, NULL,
                                                     "-rtree", bitmap,
                                                     NULL,
                                                     &comparisonBitmap);
            testErrors |= gmmain.compare_test_results_to_reference_bitmap(
                gm, compareConfig, "-rtree", bitmap, &comparisonBitmap);
        }

        if (!(gmFlags & GM::kSkipPicture_Flag) && doTileGrid) {
@@ -1410,8 +1519,9 @@ int tool_main(int argc, char** argv) {
                // We record with the reciprocal scale to obtain a replay
                // result that can be validated against comparisonBitmap.
                SkScalar recordScale = SkScalarInvert(replayScale);
                SkPicture* pict = gmmain.generate_new_picture(gm, kTileGrid_BbhType,
                    SkPicture::kUsePathBoundsForClip_RecordingFlag, recordScale);
                SkPicture* pict = gmmain.generate_new_picture(
                    gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag,
                    recordScale);
                SkAutoUnref aur(pict);
                SkBitmap bitmap;
                gmmain.generate_image_from_picture(gm, compareConfig, pict,
@@ -1421,11 +1531,9 @@ int tool_main(int argc, char** argv) {
                    suffix += "-scale-";
                    suffix.appendScalar(replayScale);
                }
                testErrors |= gmmain.handle_test_results(gm, compareConfig,
                                                         NULL, NULL,
                                                         suffix.c_str(), bitmap,
                                                         NULL,
                                                         &comparisonBitmap);
                testErrors |= gmmain.compare_test_results_to_reference_bitmap(
                    gm, compareConfig, suffix.c_str(), bitmap,
                    &comparisonBitmap);
            }
        }

@@ -1436,16 +1544,14 @@ int tool_main(int argc, char** argv) {

        if ((ERROR_NONE == testErrors) && doPipe) {
            pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig,
                                                    comparisonBitmap,
                                                    readPath);
                                                    comparisonBitmap);
        }

        if ((ERROR_NONE == testErrors) &&
            (ERROR_NONE == pipeErrors) &&
            doTiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
            pipeErrors |= gmmain.test_tiled_pipe_playback(gm, compareConfig,
                                                          comparisonBitmap,
                                                          readPath);
                                                          comparisonBitmap);
        }

        testErrors |= pipeErrors;
@@ -1454,10 +1560,10 @@ int tool_main(int argc, char** argv) {
        // Update overall results.
        // We only tabulate the particular error types that we currently
        // care about (e.g., missing reference images). Later on, if we
        // want to also tabulate pixel mismatches vs dimension mistmatches
        // (or whatever else), we can do so.
        // want to also tabulate other error types, we can do so.
        testsRun++;
        if (!readPath || (ERROR_READING_REFERENCE_IMAGE & testErrors)) {
        if (!gmmain.fExpectationsSource.get() ||
            (ERROR_READING_REFERENCE_IMAGE & testErrors)) {
            testsMissingReferenceImages++;
        } else if (ERROR_NONE == testErrors) {
            testsPassed++;
@@ -0,0 +1 @@
out/Debug/gm --match aaclip --config 8888 -r gm/tests/tempfiles/aaclip-images --writeJsonSummary gm/tests/outputs/aaclip-readback/output-actual/json-summary.txt
@@ -0,0 +1,39 @@
{
   "actual-results" : {
      "failed" : null,
      "failure-ignored" : null,
      "no-comparison" : null,
      "succeeded" : {
         "aaclip_8888" : {
            "checksum" : FAKE
         },
         "simpleaaclip_aaclip_8888" : {
            "checksum" : FAKE
         },
         "simpleaaclip_path_8888" : {
            "checksum" : FAKE
         },
         "simpleaaclip_rect_8888" : {
            "checksum" : FAKE
         }
      }
   },
   "expected-results" : {
      "aaclip_8888" : {
         "checksums" : [ FAKE ],
         "ignore-failure" : false
      },
      "simpleaaclip_aaclip_8888" : {
         "checksums" : [ FAKE ],
         "ignore-failure" : false
      },
      "simpleaaclip_path_8888" : {
         "checksums" : [ FAKE ],
         "ignore-failure" : false
      },
      "simpleaaclip_rect_8888" : {
         "checksums" : [ FAKE ],
         "ignore-failure" : false
      }
   }
}
@@ -0,0 +1 @@
0
6 gm/tests/outputs/aaclip-readback/output-expected/stdout Normal file
@@ -0,0 +1,6 @@
reading from gm/tests/tempfiles/aaclip-images
drawing... aaclip [640 480]
drawing... simpleaaclip_aaclip [640 480]
drawing... simpleaaclip_path [640 480]
drawing... simpleaaclip_rect [640 480]
Ran 4 tests: 4 passed, 0 failed, 0 missing reference images
@@ -0,0 +1 @@
out/Debug/gm --match aaclip --config 8888 -w gm/tests/tempfiles/aaclip-images --writeJsonSummary gm/tests/outputs/aaclip-write/output-actual/json-summary.txt
@@ -0,0 +1,22 @@
{
   "actual-results" : {
      "failed" : null,
      "failure-ignored" : null,
      "no-comparison" : {
         "aaclip_8888" : {
            "checksum" : FAKE
         },
         "simpleaaclip_aaclip_8888" : {
            "checksum" : FAKE
         },
         "simpleaaclip_path_8888" : {
            "checksum" : FAKE
         },
         "simpleaaclip_rect_8888" : {
            "checksum" : FAKE
         }
      },
      "succeeded" : null
   },
   "expected-results" : null
}
@@ -0,0 +1 @@
0
6 gm/tests/outputs/aaclip-write/output-expected/stdout Normal file
@@ -0,0 +1,6 @@
writing to gm/tests/tempfiles/aaclip-images
drawing... aaclip [640 480]
drawing... simpleaaclip_aaclip [640 480]
drawing... simpleaaclip_path [640 480]
drawing... simpleaaclip_rect [640 480]
Ran 4 tests: 0 passed, 0 failed, 4 missing reference images
@@ -12,7 +12,7 @@
   "expected-results" : {
      "8888/dashing2" : {
         "checksums" : null,
         "ignore-failure" : true
         "ignore-failure" : false
      }
   }
}
@@ -42,7 +42,6 @@ function compare_directories {

# Run gm...
# - with the arguments in $1
# - writing resulting images into $2/$OUTPUT_ACTUAL_SUBDIR/images
# - writing stdout into $2/$OUTPUT_ACTUAL_SUBDIR/stdout
# - writing json summary into $2/$OUTPUT_ACTUAL_SUBDIR/json-summary.txt
# - writing return value into $2/$OUTPUT_ACTUAL_SUBDIR/return_value
@@ -122,6 +121,7 @@ function create_inputs_dir {
GM_TESTDIR=gm/tests
GM_INPUTS=$GM_TESTDIR/inputs
GM_OUTPUTS=$GM_TESTDIR/outputs
GM_TEMPFILES=$GM_TESTDIR/tempfiles

create_inputs_dir $GM_INPUTS

@@ -142,4 +142,15 @@ gm_test "--hierarchy --match dashing2 --config 8888 -r $GM_INPUTS/empty-dir" "$G
# section should be empty.
gm_test "--hierarchy --match dashing2 --config 8888" "$GM_OUTPUTS/no-readpath"

# Write out a handful of test images and read them back in.
#
# This test would have caught
# http://code.google.com/p/skia/issues/detail?id=1079 ('gm generating
# spurious pixel_error messages as of r7258').
IMAGEDIR=$GM_TEMPFILES/aaclip-images
rm -rf $IMAGEDIR
mkdir -p $IMAGEDIR
gm_test "--match aaclip --config 8888 -w $IMAGEDIR" "$GM_OUTPUTS/aaclip-write"
gm_test "--match aaclip --config 8888 -r $IMAGEDIR" "$GM_OUTPUTS/aaclip-readback"

echo "All tests passed."