gm: write all messages to stdout/stderr with a "GM:" preamble, to distinguish them from the various debug messages coming from elsewhere

Review URL: https://codereview.chromium.org/12691009

git-svn-id: http://skia.googlecode.com/svn/trunk@8126 2bbb7eff-a529-9590-31e7-b0007b416f81
epoger@google.com 2013-03-13 14:18:40 +00:00
parent 530efc8085
commit 5efdd0cb9c
20 changed files with 102 additions and 88 deletions
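
For reference, the whole change hangs off one small helper: a varargs gm_fprintf() wrapper that writes a "GM: " preamble before forwarding the caller's format string, so the self-test harness can pick out GM's own lines with grep ^GM:. Below is that helper as it appears in the diff, wrapped in an illustrative main() (the main() and its sample arguments are made up here, not part of the commit) so it can be compiled and run on its own:

#include <cstdarg>
#include <cstdio>

// Write a "GM: "-prefixed message to the given stream.
static void gm_fprintf(FILE *stream, const char format[], ...) {
    va_list args;
    va_start(args, format);
    fprintf(stream, "GM: ");        // preamble marks this as GM output
    vfprintf(stream, format, args); // forward the caller's message as-is
    va_end(args);
}

int main() {
    gm_fprintf(stdout, "drawing... %s [%d %d]\n", "selftest1", 300, 200);
    gm_fprintf(stderr, "FAILED to read %s\n", "selftest1.png");
    return 0;
}
// stdout: GM: drawing... selftest1 [300 200]
// stderr: GM: FAILED to read selftest1.png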

gm/gm.h

@@ -22,11 +22,11 @@
namespace skiagm {
static inline SkISize make_isize(int w, int h) {
SkISize sz;
sz.set(w, h);
return sz;
}
static inline SkISize make_isize(int w, int h) {
SkISize sz;
sz.set(w, h);
return sz;
}
class GM {
public:


@@ -7,6 +7,7 @@
#ifndef gm_expectations_DEFINED
#define gm_expectations_DEFINED
#include <stdarg.h>
#include "gm.h"
#include "SkBitmap.h"
#include "SkBitmapChecksummer.h"
@@ -53,6 +54,14 @@ namespace skiagm {
return jsonValue.asUInt64();
}
static void gm_fprintf(FILE *stream, const char format[], ...) {
va_list args;
va_start(args, format);
fprintf(stream, "GM: ");
vfprintf(stream, format, args);
va_end(args);
}
static SkString make_filename(const char path[],
const char renderModeDescriptor[],
const char *name,
@@ -103,9 +112,10 @@ namespace skiagm {
if (ignoreFailure.isNull()) {
fIgnoreFailure = kDefaultIgnoreFailure;
} else if (!ignoreFailure.isBool()) {
fprintf(stderr, "found non-boolean json value for key '%s' in element '%s'\n",
kJsonKey_ExpectedResults_IgnoreFailure,
jsonElement.toStyledString().c_str());
gm_fprintf(stderr, "found non-boolean json value"
" for key '%s' in element '%s'\n",
kJsonKey_ExpectedResults_IgnoreFailure,
jsonElement.toStyledString().c_str());
DEBUGFAIL_SEE_STDERR;
fIgnoreFailure = kDefaultIgnoreFailure;
} else {
@@ -116,16 +126,18 @@ namespace skiagm {
if (allowedChecksums.isNull()) {
// ok, we'll just assume there aren't any expected checksums to compare against
} else if (!allowedChecksums.isArray()) {
fprintf(stderr, "found non-array json value for key '%s' in element '%s'\n",
kJsonKey_ExpectedResults_Checksums,
jsonElement.toStyledString().c_str());
gm_fprintf(stderr, "found non-array json value"
" for key '%s' in element '%s'\n",
kJsonKey_ExpectedResults_Checksums,
jsonElement.toStyledString().c_str());
DEBUGFAIL_SEE_STDERR;
} else {
for (Json::ArrayIndex i=0; i<allowedChecksums.size(); i++) {
Json::Value checksumElement = allowedChecksums[i];
if (!checksumElement.isIntegral()) {
fprintf(stderr, "found non-integer checksum in json element '%s'\n",
jsonElement.toStyledString().c_str());
gm_fprintf(stderr, "found non-integer checksum"
" in json element '%s'\n",
jsonElement.toStyledString().c_str());
DEBUGFAIL_SEE_STDERR;
} else {
fAllowedChecksums.push_back() = asChecksum(checksumElement);
@@ -233,7 +245,7 @@ namespace skiagm {
return Expectations(referenceBitmap);
} else {
if (fNotifyOfMissingFiles) {
fprintf(stderr, "FAILED to read %s\n", path.c_str());
gm_fprintf(stderr, "FAILED to read %s\n", path.c_str());
}
return Expectations();
}
@@ -331,14 +343,14 @@ namespace skiagm {
static bool parse(const char *jsonPath, Json::Value *jsonRoot) {
SkFILEStream inFile(jsonPath);
if (!inFile.isValid()) {
fprintf(stderr, "unable to read JSON file %s\n", jsonPath);
gm_fprintf(stderr, "unable to read JSON file %s\n", jsonPath);
DEBUGFAIL_SEE_STDERR;
return false;
}
SkAutoDataUnref dataRef(readFileIntoSkData(inFile));
if (NULL == dataRef.get()) {
fprintf(stderr, "error reading JSON file %s\n", jsonPath);
gm_fprintf(stderr, "error reading JSON file %s\n", jsonPath);
DEBUGFAIL_SEE_STDERR;
return false;
}
@@ -347,7 +359,7 @@ namespace skiagm {
size_t size = dataRef.get()->size();
Json::Reader reader;
if (!reader.parse(bytes, bytes+size, *jsonRoot)) {
fprintf(stderr, "error parsing JSON file %s\n", jsonPath);
gm_fprintf(stderr, "error parsing JSON file %s\n", jsonPath);
DEBUGFAIL_SEE_STDERR;
return false;
}


@@ -223,8 +223,8 @@ public:
// nothing to do here; 565 bitmaps are inherently opaque
break;
default:
fprintf(stderr, "unsupported bitmap config %d\n", config);
SkDEBUGFAIL("unsupported bitmap config");
gm_fprintf(stderr, "unsupported bitmap config %d\n", config);
DEBUGFAIL_SEE_STDERR;
}
}
@@ -271,9 +271,9 @@ public:
void ListErrors() {
for (int i = 0; i < fFailedTests.count(); ++i) {
if (fFailedTests[i].fIsPixelError) {
SkDebugf("\t\t%s pixel_error\n", fFailedTests[i].fName.c_str());
gm_fprintf(stderr, "\t\t%s pixel_error\n", fFailedTests[i].fName.c_str());
} else {
SkDebugf("\t\t%s\n", fFailedTests[i].fName.c_str());
gm_fprintf(stderr, "\t\t%s\n", fFailedTests[i].fName.c_str());
}
}
}
@@ -518,7 +518,7 @@ public:
if (success) {
return kEmptyErrorBitfield;
} else {
fprintf(stderr, "FAILED to write %s\n", path.c_str());
gm_fprintf(stderr, "FAILED to write %s\n", path.c_str());
RecordError(kWritingReferenceImage_ErrorBitmask, name,
renderModeDescriptor);
return kWritingReferenceImage_ErrorBitmask;
@@ -536,15 +536,16 @@ public:
const int width = actualBitmap.width();
const int height = actualBitmap.height();
if ((expectedWidth != width) || (expectedHeight != height)) {
SkDebugf("---- %s: dimension mismatch -- expected [%d %d], actual [%d %d]\n",
testName, expectedWidth, expectedHeight, width, height);
gm_fprintf(stderr, "---- %s: dimension mismatch --"
" expected [%d %d], actual [%d %d]\n",
testName, expectedWidth, expectedHeight, width, height);
return;
}
if ((SkBitmap::kARGB_8888_Config != expectedBitmap.config()) ||
(SkBitmap::kARGB_8888_Config != actualBitmap.config())) {
SkDebugf("---- %s: not computing max per-channel pixel mismatch because non-8888\n",
testName);
gm_fprintf(stderr, "---- %s: not computing max per-channel"
" pixel mismatch because non-8888\n", testName);
return;
}
@@ -575,9 +576,9 @@ public:
}
}
}
SkDebugf("---- %s: %d (of %d) differing pixels, max per-channel mismatch"
" R=%d G=%d B=%d A=%d\n",
testName, differingPixels, width*height, errR, errG, errB, errA);
gm_fprintf(stderr, "---- %s: %d (of %d) differing pixels,"
" max per-channel mismatch R=%d G=%d B=%d A=%d\n",
testName, differingPixels, width*height, errR, errG, errB, errA);
}
/**
@@ -1012,16 +1013,16 @@ static const ConfigData gRec[] = {
};
static void usage(const char * argv0) {
SkDebugf("%s\n", argv0);
SkDebugf(" [--config ");
fprintf(stderr, "%s\n", argv0);
fprintf(stderr, " [--config ");
for (size_t i = 0; i < SK_ARRAY_COUNT(gRec); ++i) {
if (i > 0) {
SkDebugf("|");
fprintf(stderr, "|");
}
SkDebugf(gRec[i].fName);
fprintf(stderr, "%s", gRec[i].fName);
}
SkDebugf("]:\n run these configurations\n");
SkDebugf(
fprintf(stderr, "]:\n run these configurations\n");
fprintf(stderr,
// Alphabetized ignoring "no" prefix ("readPath", "noreplay", "resourcePath").
// It would probably be better if we allowed both yes-and-no settings for each
// one, e.g.:
@@ -1197,14 +1198,12 @@ int tool_main(int argc, char** argv) {
appendUnique<size_t>(&configs, index);
userConfig = true;
} else {
SkString str;
str.printf("unrecognized config %s\n", *argv);
SkDebugf(str.c_str());
gm_fprintf(stderr, "unrecognized config %s\n", *argv);
usage(commandName);
return -1;
}
} else {
SkDebugf("missing arg for --config\n");
gm_fprintf(stderr, "missing arg for --config\n");
usage(commandName);
return -1;
}
@@ -1215,14 +1214,12 @@ int tool_main(int argc, char** argv) {
if (index >= 0) {
*excludeConfigs.append() = index;
} else {
SkString str;
str.printf("unrecognized exclude-config %s\n", *argv);
SkDebugf(str.c_str());
gm_fprintf(stderr, "unrecognized exclude-config %s\n", *argv);
usage(commandName);
return -1;
}
} else {
SkDebugf("missing arg for --exclude-config\n");
gm_fprintf(stderr, "missing arg for --exclude-config\n");
usage(commandName);
return -1;
}
@@ -1267,7 +1264,7 @@ int tool_main(int argc, char** argv) {
gpuCacheSize.fBytes = atoi(*++argv);
gpuCacheSize.fCount = atoi(*++argv);
} else {
SkDebugf("missing arg for --gpuCacheSize\n");
gm_fprintf(stderr, "missing arg for --gpuCacheSize\n");
usage(commandName);
return -1;
}
@@ -1298,7 +1295,7 @@ int tool_main(int argc, char** argv) {
}
moduloDivisor = atoi(*argv);
if (moduloRemainder < 0 || moduloDivisor <= 0 || moduloRemainder >= moduloDivisor) {
SkDebugf("invalid modulo values.");
gm_fprintf(stderr, "invalid modulo values.");
return -1;
}
} else if (strcmp(*argv, "--nopdf") == 0) {
@@ -1372,40 +1369,39 @@ int tool_main(int argc, char** argv) {
if (doVerbose) {
SkString str;
str.printf("gm: %d configs:", configs.count());
str.printf("%d configs:", configs.count());
for (int i = 0; i < configs.count(); ++i) {
str.appendf(" %s", gRec[configs[i]].fName);
}
SkDebugf("%s\n", str.c_str());
gm_fprintf(stderr, "%s\n", str.c_str());
}
GM::SetResourcePath(resourcePath);
if (readPath) {
if (!sk_exists(readPath)) {
fprintf(stderr, "readPath %s does not exist!\n", readPath);
gm_fprintf(stderr, "readPath %s does not exist!\n", readPath);
return -1;
}
if (sk_isdir(readPath)) {
fprintf(stderr, "reading from %s\n", readPath);
gm_fprintf(stdout, "reading from %s\n", readPath);
gmmain.fExpectationsSource.reset(SkNEW_ARGS(
IndividualImageExpectationsSource,
(readPath, notifyMissingReadReference)));
} else {
fprintf(stderr, "reading expectations from JSON summary file %s\n",
readPath);
gm_fprintf(stdout, "reading expectations from JSON summary file %s\n", readPath);
gmmain.fExpectationsSource.reset(SkNEW_ARGS(
JsonExpectationsSource, (readPath)));
}
}
if (writePath) {
fprintf(stderr, "writing to %s\n", writePath);
gm_fprintf(stdout, "writing to %s\n", writePath);
}
if (writePicturePath) {
fprintf(stderr, "writing pictures to %s\n", writePicturePath);
gm_fprintf(stdout, "writing pictures to %s\n", writePicturePath);
}
if (resourcePath) {
fprintf(stderr, "reading resources from %s\n", resourcePath);
gm_fprintf(stdout, "reading resources from %s\n", resourcePath);
}
if (moduloDivisor <= 0) {
@@ -1465,8 +1461,8 @@ int tool_main(int argc, char** argv) {
}
SkISize size = gm->getISize();
SkDebugf("%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName,
size.width(), size.height());
gm_fprintf(stdout, "%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName,
size.width(), size.height());
ErrorBitfield testErrors = kEmptyErrorBitfield;
uint32_t gmFlags = gm->getFlags();
@@ -1682,8 +1678,8 @@ int tool_main(int argc, char** argv) {
SkDELETE(gm);
}
SkDebugf("Ran %d tests: %d passed, %d failed, %d missing reference images\n",
testsRun, testsPassed, testsFailed, testsMissingReferenceImages);
gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d missing reference images\n",
testsRun, testsPassed, testsFailed, testsMissingReferenceImages);
gmmain.ListErrors();
if (NULL != writeJsonSummaryPath) {
@@ -1713,7 +1709,7 @@ int tool_main(int argc, char** argv) {
if (kGPU_Backend == config.fBackend) {
GrContext* gr = grFactory->get(config.fGLContextType);
SkDebugf("config: %s %x\n", config.fName, gr);
gm_fprintf(stdout, "config: %s %x\n", config.fName, gr);
gr->printCacheStats();
}
}


@@ -0,0 +1,4 @@
GM: ---- 8888/selftest1: 60000 (of 60000) differing pixels, max per-channel mismatch R=0 G=111 B=103 A=0
GM: ---- 565/selftest1: not computing max per-channel pixel mismatch because non-8888
GM: 8888/selftest1 pixel_error
GM: 565/selftest1 pixel_error


@@ -1,3 +1,3 @@
reading from gm/tests/inputs/images/different-pixels
drawing... selftest1 [300 200]
Ran 1 tests: 0 passed, 1 failed, 0 missing reference images
GM: reading from gm/tests/inputs/images/different-pixels
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 0 passed, 1 failed, 0 missing reference images


@@ -0,0 +1,2 @@
GM: 8888/selftest1 pixel_error
GM: 565/selftest1 pixel_error


@@ -1,3 +1,3 @@
reading expectations from JSON summary file gm/tests/inputs/json/different-pixels.json
drawing... selftest1 [300 200]
Ran 1 tests: 0 passed, 1 failed, 0 missing reference images
GM: reading expectations from JSON summary file gm/tests/inputs/json/different-pixels.json
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 0 passed, 1 failed, 0 missing reference images


@@ -0,0 +1,2 @@
GM: FAILED to read gm/tests/inputs/images/empty-dir/8888/selftest1.png
GM: FAILED to read gm/tests/inputs/images/empty-dir/565/selftest1.png


@@ -1,5 +1,3 @@
reading from gm/tests/inputs/images/empty-dir
drawing... selftest1 [300 200]
FAILED to read gm/tests/inputs/images/empty-dir/8888/selftest1.png
FAILED to read gm/tests/inputs/images/empty-dir/565/selftest1.png
Ran 1 tests: 1 passed, 0 failed, 1 missing reference images
GM: reading from gm/tests/inputs/images/empty-dir
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 1 passed, 0 failed, 1 missing reference images


@@ -1,3 +1,3 @@
reading from gm/tests/inputs/images/identical-bytes
drawing... selftest1 [300 200]
Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
GM: reading from gm/tests/inputs/images/identical-bytes
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images


@@ -1,3 +1,3 @@
reading expectations from JSON summary file gm/tests/inputs/json/identical-bytes.json
drawing... selftest1 [300 200]
Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-bytes.json
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images


@@ -1,3 +1,3 @@
reading from gm/tests/inputs/images/identical-pixels
drawing... selftest1 [300 200]
Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
GM: reading from gm/tests/inputs/images/identical-pixels
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images


@@ -1,3 +1,3 @@
reading expectations from JSON summary file gm/tests/inputs/json/identical-pixels.json
drawing... selftest1 [300 200]
Ran 1 tests: 1 passed, 0 failed, 0 missing reference images
GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-pixels.json
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 1 passed, 0 failed, 0 missing reference images


@@ -1,2 +1,2 @@
drawing... selftest1 [300 200]
Ran 1 tests: 1 passed, 0 failed, 1 missing reference images
GM: drawing... selftest1 [300 200]
GM: Ran 1 tests: 1 passed, 0 failed, 1 missing reference images


@@ -60,7 +60,7 @@ function gm_test {
mkdir -p $ACTUAL_OUTPUT_DIR
COMMAND="$GM_BINARY $GM_ARGS --writeJsonSummary $JSON_SUMMARY_FILE"
echo "$COMMAND" >$ACTUAL_OUTPUT_DIR/command_line
$COMMAND &>$ACTUAL_OUTPUT_DIR/stdout
$COMMAND >$ACTUAL_OUTPUT_DIR/stdout 2>$ACTUAL_OUTPUT_DIR/stderr
echo $? >$ACTUAL_OUTPUT_DIR/return_value
# Only compare selected lines in the stdout, to ignore any spurious lines
@@ -68,10 +68,10 @@ function gm_test {
#
# TODO(epoger): This is still hacky... we need to rewrite this script in
# Python soon, and make stuff like this more maintainable.
grep --regexp=^reading --regexp=^writing --regexp=^drawing \
--regexp=^FAILED --regexp=^Ran $ACTUAL_OUTPUT_DIR/stdout \
>$ACTUAL_OUTPUT_DIR/stdout-tmp
grep ^GM: $ACTUAL_OUTPUT_DIR/stdout >$ACTUAL_OUTPUT_DIR/stdout-tmp
mv $ACTUAL_OUTPUT_DIR/stdout-tmp $ACTUAL_OUTPUT_DIR/stdout
grep ^GM: $ACTUAL_OUTPUT_DIR/stderr >$ACTUAL_OUTPUT_DIR/stderr-tmp
mv $ACTUAL_OUTPUT_DIR/stderr-tmp $ACTUAL_OUTPUT_DIR/stderr
compare_directories $EXPECTED_OUTPUT_DIR $ACTUAL_OUTPUT_DIR
}