make ignored-tests.txt specify full test name, not partial name

BUG=skia:2022
R=bsalomon@google.com

Author: epoger@google.com

Review URL: https://codereview.chromium.org/136883006

git-svn-id: http://skia.googlecode.com/svn/trunk@13060 2bbb7eff-a529-9590-31e7-b0007b416f81
This commit is contained in:
commit-bot@chromium.org 2014-01-14 02:54:11 +00:00
parent 5f43d21dd4
commit 3e62ebf93b
17 changed files with 89 additions and 43 deletions

View File

@@ -1,4 +1,4 @@
# Failures of any GM tests/configs listed in this file will be ignored [1], as
# Failures of any GM tests listed in this file will be ignored [1], as
# if they had been marked "ignore-failure": true in the per-builder
# expected-results.json files.
#
@@ -10,6 +10,9 @@
# If there are any lingering failures that need to be suppressed (ignored), you
# MUST move those suppressions into the per-builder expected-results.json files.
#
# The test name must match exactly (no partial string matches).
# See http://skbug.com/2022
#
# Any lines starting with '#' are comments and will be ignored.
# Other than that, each line must either be whitespace or list just one test.
#
@@ -25,13 +28,10 @@
# EXAMPLES: (remove the first '#' on each line)
#
## Added by edisonn as part of https://codereview.chromium.org/23851037/
#gradients
#
## Added by epoger as part of MADE-UP BUG
## https://code.google.com/p/skia/issues/detail?id=123456 : ignoring failures on
## gpu config of gradtext GM test
#gradtext_gpu
## gradtext GM test
#gradtext
# Added for skbug.com/1998 by bsalomon. Will require rebaselining when changes are complete.
bleed

View File

@@ -326,9 +326,9 @@ public:
/**
* Returns true if failures on this test should be ignored.
*/
bool ShouldIgnoreTest(const SkString &name) const {
for (int i = 0; i < fIgnorableTestSubstrings.count(); i++) {
if (name.contains(fIgnorableTestSubstrings[i].c_str())) {
bool ShouldIgnoreTest(const char *name) const {
for (int i = 0; i < fIgnorableTestNames.count(); i++) {
if (fIgnorableTestNames[i].equals(name)) {
return true;
}
}
@@ -918,9 +918,6 @@ public:
ErrorCombination compare_test_results_to_stored_expectations(
GM* gm, const ConfigData& gRec, const char* configName,
const BitmapAndDigest* actualBitmapAndDigest) {
SkString shortNamePlusConfig = make_shortname_plus_config(gm->shortName(), configName);
ErrorCombination errors;
if (NULL == actualBitmapAndDigest) {
@@ -937,7 +934,7 @@ public:
errors.add(ErrorCombination(kIntentionallySkipped_ErrorType));
} else {
ExpectationsSource *expectationsSource = this->fExpectationsSource.get();
SkString nameWithExtension(shortNamePlusConfig);
SkString nameWithExtension = make_shortname_plus_config(gm->shortName(), configName);
nameWithExtension.append(".");
nameWithExtension.append(kPNG_FileExtension);
@@ -956,7 +953,7 @@ public:
* See comments above complete_bitmap() for more detail.
*/
Expectations expectations = expectationsSource->get(nameWithExtension.c_str());
if (this->ShouldIgnoreTest(shortNamePlusConfig)) {
if (this->ShouldIgnoreTest(gm->shortName())) {
expectations.setIgnoreFailure(true);
}
errors.add(compare_to_expectations(expectations, *actualBitmapAndDigest,
@@ -1254,7 +1251,7 @@ public:
bool fUseFileHierarchy, fWriteChecksumBasedFilenames;
ErrorCombination fIgnorableErrorTypes;
SkTArray<SkString> fIgnorableTestSubstrings;
SkTArray<SkString> fIgnorableTestNames;
const char* fMismatchPath;
const char* fMissingExpectationsPath;
@@ -2074,11 +2071,11 @@ static bool parse_flags_ignore_error_types(ErrorCombination* outErrorTypes) {
}
/**
* Replace contents of ignoreTestSubstrings with a list of testname/config substrings, indicating
* Replace contents of ignoreTestNames with a list of test names, indicating
* which tests' failures should be ignored.
*/
static bool parse_flags_ignore_tests(SkTArray<SkString> &ignoreTestSubstrings) {
ignoreTestSubstrings.reset();
static bool parse_flags_ignore_tests(SkTArray<SkString> &ignoreTestNames) {
ignoreTestNames.reset();
// Parse --ignoreFailuresFile
for (int i = 0; i < FLAGS_ignoreFailuresFile.count(); i++) {
@@ -2091,7 +2088,7 @@ static bool parse_flags_ignore_tests(SkTArray<SkString> &ignoreTestSubstrings) {
if (thisLine.isEmpty() || thisLine.startsWith('#')) {
// skip this line
} else {
ignoreTestSubstrings.push_back(thisLine);
ignoreTestNames.push_back(thisLine);
}
}
}
@@ -2231,7 +2228,7 @@ int tool_main(int argc, char** argv) {
if (!parse_flags_modulo(&moduloRemainder, &moduloDivisor) ||
!parse_flags_ignore_error_types(&gmmain.fIgnorableErrorTypes) ||
!parse_flags_ignore_tests(gmmain.fIgnorableTestSubstrings) ||
!parse_flags_ignore_tests(gmmain.fIgnorableTestNames) ||
#if SK_SUPPORT_GPU
!parse_flags_gpu_cache(&gGpuCacheSizeBytes, &gGpuCacheSizeCount) ||
#endif

View File

@@ -1 +1 @@
out/Debug/gm --verbose --hierarchy --match selftest1 --ignoreFailuresFile gm/tests/inputs/ignored-tests.txt --config 8888 565 -r gm/tests/inputs/images/empty-dir --writeJsonSummaryPath gm/tests/outputs/compared-against-empty-dir/output-actual/json-summary.txt --writePath gm/tests/outputs/compared-against-empty-dir/output-actual/writePath --mismatchPath gm/tests/outputs/compared-against-empty-dir/output-actual/mismatchPath --missingExpectationsPath gm/tests/outputs/compared-against-empty-dir/output-actual/missingExpectationsPath
out/Debug/gm --verbose --hierarchy --match selftest --ignoreFailuresFile gm/tests/inputs/ignored-tests.txt --config 8888 565 -r gm/tests/inputs/images/empty-dir --writeJsonSummaryPath gm/tests/outputs/compared-against-empty-dir/output-actual/json-summary.txt --writePath gm/tests/outputs/compared-against-empty-dir/output-actual/writePath --mismatchPath gm/tests/outputs/compared-against-empty-dir/output-actual/mismatchPath --missingExpectationsPath gm/tests/outputs/compared-against-empty-dir/output-actual/missingExpectationsPath

View File

@@ -4,18 +4,28 @@
"failure-ignored" : null,
"no-comparison" : {
"565/selftest1.png" : [ "bitmap-64bitMD5", 12927999507540085554 ],
"8888/selftest1.png" : [ "bitmap-64bitMD5", 1209453360120438698 ]
"565/selftest2.png" : [ "bitmap-64bitMD5", 8863920166200910451 ],
"8888/selftest1.png" : [ "bitmap-64bitMD5", 1209453360120438698 ],
"8888/selftest2.png" : [ "bitmap-64bitMD5", 13451349865803053525 ]
},
"succeeded" : null
},
"expected-results" : {
"565/selftest1.png" : {
"allowed-digests" : null,
"ignore-failure" : true
},
"565/selftest2.png" : {
"allowed-digests" : null,
"ignore-failure" : false
},
"8888/selftest1.png" : {
"allowed-digests" : null,
"ignore-failure" : true
},
"8888/selftest2.png" : {
"allowed-digests" : null,
"ignore-failure" : false
}
}
}

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/compared-against-empty-dir/output-actual/missingExpectationsPath/565/selftest2.png]

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/compared-against-empty-dir/output-actual/missingExpectationsPath/8888/selftest2.png]

View File

@@ -4,17 +4,18 @@ GM: writing to gm/tests/outputs/compared-against-empty-dir/output-actual/writePa
GM: writing mismatches to gm/tests/outputs/compared-against-empty-dir/output-actual/mismatchPath
GM: writing images without expectations to gm/tests/outputs/compared-against-empty-dir/output-actual/missingExpectationsPath
GM: reading resources from resources
GM: drawing... selftest2 [300 200]
GM: drawing... selftest1 [300 200]
GM: Ran 1 GMs
GM: Ran 2 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 1 modes [""]
GM: ... so there should be a total of 2 tests.
GM: Ran 2 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 GeneratePdfFailed=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
GM: ... so there should be a total of 4 tests.
GM: Ran 4 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 GeneratePdfFailed=0 ExpectationsMismatch=0 MissingExpectations=4 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 GeneratePdfFailed:
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
GM: [ ] 4 MissingExpectations: 8888/selftest2 565/selftest2 8888/selftest1 565/selftest1
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/compared-against-empty-dir/output-actual/writePath/565/selftest2.png]

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/compared-against-empty-dir/output-actual/writePath/8888/selftest2.png]

View File

@@ -1 +1 @@
out/Debug/gm --verbose --hierarchy --match selftest1 --ignoreFailuresFile gm/tests/inputs/ignored-tests.txt --config 8888 565 -r gm/tests/inputs/json/different-pixels.json --writeJsonSummaryPath gm/tests/outputs/ignoring-one-test/output-actual/json-summary.txt --writePath gm/tests/outputs/ignoring-one-test/output-actual/writePath --mismatchPath gm/tests/outputs/ignoring-one-test/output-actual/mismatchPath --missingExpectationsPath gm/tests/outputs/ignoring-one-test/output-actual/missingExpectationsPath
out/Debug/gm --verbose --hierarchy --match selftest --ignoreFailuresFile gm/tests/inputs/ignored-tests.txt --config 8888 565 -r gm/tests/inputs/json/different-pixels.json --writeJsonSummaryPath gm/tests/outputs/ignoring-one-test/output-actual/json-summary.txt --writePath gm/tests/outputs/ignoring-one-test/output-actual/writePath --mismatchPath gm/tests/outputs/ignoring-one-test/output-actual/mismatchPath --missingExpectationsPath gm/tests/outputs/ignoring-one-test/output-actual/missingExpectationsPath

View File

@@ -1,9 +1,11 @@
{
"actual-results" : {
"failed" : {
"565/selftest1.png" : [ "bitmap-64bitMD5", 12927999507540085554 ]
"565/selftest2.png" : [ "bitmap-64bitMD5", 8863920166200910451 ],
"8888/selftest2.png" : [ "bitmap-64bitMD5", 13451349865803053525 ]
},
"failure-ignored" : {
"565/selftest1.png" : [ "bitmap-64bitMD5", 12927999507540085554 ],
"8888/selftest1.png" : [ "bitmap-64bitMD5", 1209453360120438698 ]
},
"no-comparison" : null,
@@ -14,6 +16,12 @@
"allowed-digests" : [
[ "bitmap-64bitMD5", 8863920166200910451 ]
],
"ignore-failure" : true
},
"565/selftest2.png" : {
"allowed-digests" : [
[ "bitmap-64bitMD5", 12927999507540085554 ]
],
"ignore-failure" : false
},
"8888/selftest1.png" : {
@@ -21,6 +29,12 @@
[ "bitmap-64bitMD5", 13451349865803053525 ]
],
"ignore-failure" : true
},
"8888/selftest2.png" : {
"allowed-digests" : [
[ "bitmap-64bitMD5", 1209453360120438698 ]
],
"ignore-failure" : false
}
}
}

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/ignoring-one-test/output-actual/mismatchPath/565/selftest2.png]

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/ignoring-one-test/output-actual/mismatchPath/8888/selftest2.png]

View File

@@ -4,17 +4,18 @@ GM: writing to gm/tests/outputs/ignoring-one-test/output-actual/writePath
GM: writing mismatches to gm/tests/outputs/ignoring-one-test/output-actual/mismatchPath
GM: writing images without expectations to gm/tests/outputs/ignoring-one-test/output-actual/missingExpectationsPath
GM: reading resources from resources
GM: drawing... selftest2 [300 200]
GM: drawing... selftest1 [300 200]
GM: Ran 1 GMs
GM: Ran 2 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 1 modes [""]
GM: ... so there should be a total of 2 tests.
GM: Ran 2 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 GeneratePdfFailed=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
GM: ... so there should be a total of 4 tests.
GM: Ran 4 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 GeneratePdfFailed=0 ExpectationsMismatch=4 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 GeneratePdfFailed:
GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
GM: [*] 4 ExpectationsMismatch: 8888/selftest2 565/selftest2 8888/selftest1 565/selftest1
GM: [ ] 0 MissingExpectations:
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/ignoring-one-test/output-actual/writePath/565/selftest2.png]

View File

@@ -0,0 +1 @@
[contents of gm/tests/outputs/ignoring-one-test/output-actual/writePath/8888/selftest2.png]

View File

@@ -120,6 +120,17 @@ function gm_test {
compare_directories $EXPECTED_OUTPUT_DIR $ACTUAL_OUTPUT_DIR
}
# Swap contents of two files at paths $1 and $2.
function swap_files {
if [ $# != 2 ]; then
echo "swap_files requires exactly 2 parameters, got $#"
exit 1
fi
mv "$1" "$1.tmp"
mv "$2" "$1"
mv "$1.tmp" "$2"
}
# Create input dir (at path $1) with expectations (both image and json)
# that gm will match or mismatch as appropriate.
#
@@ -157,10 +168,10 @@ function create_inputs_dir {
THIS_IMAGE_DIR=$IMAGES_DIR/different-pixels
mkdir -p $THIS_IMAGE_DIR
$GM_BINARY --hierarchy --match selftest2 $CONFIGS -w $THIS_IMAGE_DIR
mv $THIS_IMAGE_DIR/8888/selftest2.png $THIS_IMAGE_DIR/8888/selftest1.png
mv $THIS_IMAGE_DIR/565/selftest2.png $THIS_IMAGE_DIR/565/selftest1.png
$GM_BINARY --hierarchy --match selftest1 $CONFIGS -r $THIS_IMAGE_DIR \
$GM_BINARY --hierarchy --match selftest $CONFIGS -w $THIS_IMAGE_DIR
swap_files $THIS_IMAGE_DIR/8888/selftest2.png $THIS_IMAGE_DIR/8888/selftest1.png
swap_files $THIS_IMAGE_DIR/565/selftest2.png $THIS_IMAGE_DIR/565/selftest1.png
$GM_BINARY --hierarchy --match selftest $CONFIGS -r $THIS_IMAGE_DIR \
--writeJsonSummaryPath $JSON_DIR/different-pixels.json
# Create another JSON expectations file which is identical to
@@ -183,9 +194,11 @@ function create_inputs_dir {
echo "# Comment line" >$GM_IGNORE_FAILURES_FILE
echo "" >>$GM_IGNORE_FAILURES_FILE
echo "# ignore any test runs whose filename contains '8888/selfte'" >>$GM_IGNORE_FAILURES_FILE
echo "# (in other words, config is 8888 and test name starts with 'selfte')" >>$GM_IGNORE_FAILURES_FILE
echo "8888/selfte" >>$GM_IGNORE_FAILURES_FILE
echo "# ignore any runs of the 'selftest1' test" >>$GM_IGNORE_FAILURES_FILE
echo "selftest1" >>$GM_IGNORE_FAILURES_FILE
echo "" >>$GM_IGNORE_FAILURES_FILE
echo "# make sure we don't do partial matches (should NOT ignore 'selftest2' runs)" >>$GM_IGNORE_FAILURES_FILE
echo "selftest" >>$GM_IGNORE_FAILURES_FILE
}
GM_TESTDIR=gm/tests
@@ -209,16 +222,18 @@ gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/d
gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/different-pixels.json" "$GM_OUTPUTS/compared-against-different-pixels-json"
# Exercise --ignoreFailuresFile flag.
gm_test "--verbose --hierarchy --match selftest1 --ignoreFailuresFile $GM_IGNORE_FAILURES_FILE $CONFIGS -r $GM_INPUTS/json/different-pixels.json" "$GM_OUTPUTS/ignoring-one-test"
# This should run two GM tests: selftest1 and selftest2.
# Failures in selftest1 should be ignored, but failures in selftest2 should not.
gm_test "--verbose --hierarchy --match selftest --ignoreFailuresFile $GM_IGNORE_FAILURES_FILE $CONFIGS -r $GM_INPUTS/json/different-pixels.json" "$GM_OUTPUTS/ignoring-one-test"
# Compare different pixels, but with a SUBSET of the expectations marked as
# ignore-failure.
gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/different-pixels-ignore-some-failures.json" "$GM_OUTPUTS/ignoring-some-failures"
# Compare generated image against an empty "expected image" dir.
# Even the tests that have been marked as ignore-failure should show up as
# no-comparison.
gm_test "--verbose --hierarchy --match selftest1 --ignoreFailuresFile $GM_IGNORE_FAILURES_FILE $CONFIGS -r $GM_INPUTS/images/empty-dir" "$GM_OUTPUTS/compared-against-empty-dir"
# Even the tests that have been marked as ignore-failure (selftest1) should
# show up as no-comparison.
gm_test "--verbose --hierarchy --match selftest --ignoreFailuresFile $GM_IGNORE_FAILURES_FILE $CONFIGS -r $GM_INPUTS/images/empty-dir" "$GM_OUTPUTS/compared-against-empty-dir"
# Compare generated image against a nonexistent "expected image" dir.
gm_test "--verbose --hierarchy --match selftest1 $CONFIGS -r ../path/to/nowhere" "$GM_OUTPUTS/compared-against-nonexistent-dir"