gm: hide list of tests with ignored errors, by default
Review URL: https://codereview.chromium.org/13811031

git-svn-id: http://skia.googlecode.com/svn/trunk@8591 2bbb7eff-a529-9590-31e7-b0007b416f81
commit 51dbabee67
parent aae71baa73
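In effect, the per-ErrorType lists of failed test names (for ignorable error types) and the per-config/per-mode breakdown are now printed only when gm is run with --verbose. A minimal sketch of the two invocations, reusing flags from the self-test command lines changed below (the --match/--config selection is just illustrative):

    # default: one summary line per ErrorType; only non-ignorable ([*]) types list their failed tests
    out/Debug/gm --hierarchy --match selftest1 --config 8888 565

    # verbose: also prints the config/mode breakdown and the failed-test names for every ErrorType
    out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565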
@@ -299,9 +299,40 @@ public:
     }
 
     /**
-     * List contents of fFailedTests to stdout.
-     */
-    void ListErrors() {
+     * Display the summary of results with this ErrorType.
+     *
+     * @param type which ErrorType
+     * @param verbose whether to be all verbose about it
+     */
+    void DisplayResultTypeSummary(ErrorType type, bool verbose) {
+        bool isIgnorableType = fIgnorableErrorCombination.includes(type);
+
+        SkString line;
+        if (isIgnorableType) {
+            line.append("[ ] ");
+        } else {
+            line.append("[*] ");
+        }
+
+        SkTArray<SkString> *failedTestsOfThisType = &fFailedTests[type];
+        int count = failedTestsOfThisType->count();
+        line.appendf("%d %s", count, getErrorTypeName(type));
+        if (!isIgnorableType || verbose) {
+            line.append(":");
+            for (int i = 0; i < count; ++i) {
+                line.append(" ");
+                line.append((*failedTestsOfThisType)[i]);
+            }
+        }
+        gm_fprintf(stdout, "%s\n", line.c_str());
+    }
+
+    /**
+     * List contents of fFailedTests to stdout.
+     *
+     * @param verbose whether to be all verbose about it
+     */
+    void ListErrors(bool verbose) {
         // First, print a single summary line.
         SkString summary;
         summary.appendf("Ran %d tests:", fTestsRun);
@@ -313,22 +344,7 @@ public:
 
         // Now, for each failure type, list the tests that failed that way.
         for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
-            SkString line;
-            ErrorType type = static_cast<ErrorType>(typeInt);
-            if (fIgnorableErrorCombination.includes(type)) {
-                line.append("[ ] ");
-            } else {
-                line.append("[*] ");
-            }
-
-            SkTArray<SkString> *failedTestsOfThisType = &fFailedTests[type];
-            int count = failedTestsOfThisType->count();
-            line.appendf("%d %s:", count, getErrorTypeName(type));
-            for (int i = 0; i < count; ++i) {
-                line.append(" ");
-                line.append((*failedTestsOfThisType)[i]);
-            }
-            gm_fprintf(stdout, "%s\n", line.c_str());
+            this->DisplayResultTypeSummary(static_cast<ErrorType>(typeInt), verbose);
         }
         gm_fprintf(stdout, "(results marked with [*] will cause nonzero return value)\n");
     }
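Net behavior of the refactor above: each ErrorType still produces exactly one summary line, but an ignorable type (prefixed "[ ]") only appends its failed-test names when verbose is true, while a non-ignorable type (prefixed "[*]") always does. The expected non-verbose stdout added further below shows the resulting shape, e.g.:

    GM: [ ] 2 MissingExpectations
    GM: [*] 0 ExpectationsMismatch: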
@@ -1686,15 +1702,6 @@ int tool_main(int argc, char** argv) {
     GrContextFactory* grFactory = NULL;
 #endif
 
-    if (FLAGS_verbose) {
-        SkString str;
-        str.printf("%d configs:", configs.count());
-        for (int i = 0; i < configs.count(); ++i) {
-            str.appendf(" %s", gRec[configs[i]].fName);
-        }
-        gm_fprintf(stderr, "%s\n", str.c_str());
-    }
-
     if (FLAGS_resourcePath.count() == 1) {
         GM::SetResourcePath(FLAGS_resourcePath[0]);
     }
@@ -1798,15 +1805,17 @@ int tool_main(int argc, char** argv) {
     if (gmmain.NumSignificantErrors() > 0) {
         reportError = true;
     }
+    int expectedNumberOfTests = gmsRun * (configs.count() + modes.count());
 
     // Output summary to stdout.
-    gm_fprintf(stdout, "Ran %d GMs\n", gmsRun);
-    gm_fprintf(stdout, "... over %2d configs [%s]\n", configs.count(),
-               list_all_config_names(configs).c_str());
-    gm_fprintf(stdout, "... and %2d modes [%s]\n", modes.count(), list_all(modes).c_str());
-    int expectedNumberOfTests = gmsRun * (configs.count() + modes.count());
-    gm_fprintf(stdout, "... so there should be a total of %d tests.\n", expectedNumberOfTests);
-    gmmain.ListErrors();
+    if (FLAGS_verbose) {
+        gm_fprintf(stdout, "Ran %d GMs\n", gmsRun);
+        gm_fprintf(stdout, "... over %2d configs [%s]\n", configs.count(),
+                   list_all_config_names(configs).c_str());
+        gm_fprintf(stdout, "... and %2d modes [%s]\n", modes.count(), list_all(modes).c_str());
+        gm_fprintf(stdout, "... so there should be a total of %d tests.\n", expectedNumberOfTests);
+    }
+    gmmain.ListErrors(FLAGS_verbose);
 
     // TODO(epoger): in a standalone CL, enable this new check.
 #if 0
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/different-pixels --writeJsonSummaryPath gm/tests/outputs/compared-against-different-pixels-images/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/different-pixels --writeJsonSummaryPath gm/tests/outputs/compared-against-different-pixels-images/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/different-pixels.json --writeJsonSummaryPath gm/tests/outputs/compared-against-different-pixels-json/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/different-pixels.json --writeJsonSummaryPath gm/tests/outputs/compared-against-different-pixels-json/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/empty-dir --writeJsonSummaryPath gm/tests/outputs/compared-against-empty-dir/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/empty-dir --writeJsonSummaryPath gm/tests/outputs/compared-against-empty-dir/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/identical-bytes --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-bytes-images/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/identical-bytes --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-bytes-images/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-bytes.json --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-bytes-json/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-bytes.json --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-bytes-json/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/identical-pixels --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-pixels-images/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/identical-pixels --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-pixels-images/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-pixels.json --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-pixels-json/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-pixels.json --writeJsonSummaryPath gm/tests/outputs/compared-against-identical-pixels-json/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 selftest2 --config 8888 565 --writeJsonSummaryPath gm/tests/outputs/intentionally-skipped-tests/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 selftest2 --config 8888 565 --writeJsonSummaryPath gm/tests/outputs/intentionally-skipped-tests/output-actual/json-summary.txt
@@ -1 +1 @@
-out/Debug/gm --hierarchy --match selftest1 --config 8888 565 --writeJsonSummaryPath gm/tests/outputs/no-readpath/output-actual/json-summary.txt
+out/Debug/gm --verbose --hierarchy --match selftest1 --config 8888 565 --writeJsonSummaryPath gm/tests/outputs/no-readpath/output-actual/json-summary.txt
gm/tests/outputs/nonverbose/output-expected/command_line (new file, 1 line)
@@ -0,0 +1 @@
+out/Debug/gm --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/images/empty-dir --writeJsonSummaryPath gm/tests/outputs/nonverbose/output-actual/json-summary.txt
gm/tests/outputs/nonverbose/output-expected/json-summary.txt (new file, 25 lines)
@@ -0,0 +1,25 @@
+{
+   "actual-results" : {
+      "failed" : null,
+      "failure-ignored" : null,
+      "no-comparison" : {
+         "565/selftest1" : {
+            "checksum" : 9512553915271796906
+         },
+         "8888/selftest1" : {
+            "checksum" : 14022967492765711532
+         }
+      },
+      "succeeded" : null
+   },
+   "expected-results" : {
+      "565/selftest1" : {
+         "checksums" : null,
+         "ignore-failure" : false
+      },
+      "8888/selftest1" : {
+         "checksums" : null,
+         "ignore-failure" : false
+      }
+   }
+}
gm/tests/outputs/nonverbose/output-expected/return_value (new file, 1 line)
@@ -0,0 +1 @@
+0
gm/tests/outputs/nonverbose/output-expected/stderr (new file, 2 lines)
@@ -0,0 +1,2 @@
+GM: FAILED to read gm/tests/inputs/images/empty-dir/8888/selftest1.png
+GM: FAILED to read gm/tests/inputs/images/empty-dir/565/selftest1.png
gm/tests/outputs/nonverbose/output-expected/stdout (new file, 10 lines)
@@ -0,0 +1,10 @@
+GM: reading from gm/tests/inputs/images/empty-dir
+GM: drawing... selftest1 [300 200]
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
+GM: [ ] 2 MissingExpectations
+GM: [*] 0 WritingReferenceImage:
+GM: (results marked with [*] will cause nonzero return value)
@@ -1 +1 @@
-out/Debug/gm --simulatePipePlaybackFailure --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-pixels.json --writeJsonSummaryPath gm/tests/outputs/pipe-playback-failure/output-actual/json-summary.txt
+out/Debug/gm --simulatePipePlaybackFailure --verbose --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-pixels.json --writeJsonSummaryPath gm/tests/outputs/pipe-playback-failure/output-actual/json-summary.txt
@@ -137,29 +137,32 @@ GM_TEMPFILES=$GM_TESTDIR/tempfiles
 create_inputs_dir $GM_INPUTS
 
 # Compare generated image against an input image file with identical bytes.
-gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/identical-bytes" "$GM_OUTPUTS/compared-against-identical-bytes-images"
-gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-bytes.json" "$GM_OUTPUTS/compared-against-identical-bytes-json"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/identical-bytes" "$GM_OUTPUTS/compared-against-identical-bytes-images"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-bytes.json" "$GM_OUTPUTS/compared-against-identical-bytes-json"
 
 # Compare generated image against an input image file with identical pixels but different PNG encoding.
-gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/identical-pixels" "$GM_OUTPUTS/compared-against-identical-pixels-images"
-gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/compared-against-identical-pixels-json"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/identical-pixels" "$GM_OUTPUTS/compared-against-identical-pixels-images"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/compared-against-identical-pixels-json"
 
 # Compare generated image against an input image file with different pixels.
-gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/different-pixels" "$GM_OUTPUTS/compared-against-different-pixels-images"
-gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/different-pixels.json" "$GM_OUTPUTS/compared-against-different-pixels-json"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/different-pixels" "$GM_OUTPUTS/compared-against-different-pixels-images"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/different-pixels.json" "$GM_OUTPUTS/compared-against-different-pixels-json"
 
 # Compare generated image against an empty "expected image" dir.
-gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/empty-dir" "$GM_OUTPUTS/compared-against-empty-dir"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/empty-dir" "$GM_OUTPUTS/compared-against-empty-dir"
 
+# Compare generated image against an empty "expected image" dir, but NOT in verbose mode.
+gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/empty-dir" "$GM_OUTPUTS/nonverbose"
+
 # If run without "-r", the JSON's "actual-results" section should contain
 # actual checksums marked as "failure-ignored", but the "expected-results"
 # section should be empty.
-gm_test "--hierarchy --match selftest1 $CONFIGS" "$GM_OUTPUTS/no-readpath"
+gm_test "--verbose --hierarchy --match selftest1 $CONFIGS" "$GM_OUTPUTS/no-readpath"
 
 # Test what happens if a subset of the renderModes fail (e.g. pipe)
-gm_test "--simulatePipePlaybackFailure --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/pipe-playback-failure"
+gm_test "--simulatePipePlaybackFailure --verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/pipe-playback-failure"
 
 # Confirm that IntentionallySkipped tests are recorded as such.
-gm_test "--hierarchy --match selftest1 selftest2 $CONFIGS" "$GM_OUTPUTS/intentionally-skipped-tests"
+gm_test "--verbose --hierarchy --match selftest1 selftest2 $CONFIGS" "$GM_OUTPUTS/intentionally-skipped-tests"
 
 echo "All tests passed."