Improve tool that analyzes gm JSON summary

BUG=https://code.google.com/p/skia/issues/detail?id=1300
R=borenet@google.com

Review URL: https://codereview.chromium.org/15298010

git-svn-id: http://skia.googlecode.com/svn/trunk@9217 2bbb7eff-a529-9590-31e7-b0007b416f81
This commit is contained in:
epoger@google.com 2013-05-21 16:06:40 +00:00
parent 2d51677b19
commit a55e48d6ef
4 changed files with 124 additions and 51 deletions

View File

@ -1,47 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility to confirm that a JSON summary written by GM contains no failures.
Usage:
python confirm_no_failures_in_json.py <filename>
"""
__author__ = 'Elliot Poger'
import json
import sys
# These constants must be kept in sync with the kJsonKey_ constants in
# gm_expectations.cpp !
# Top-level key under which GM records its per-test results.
JSONKEY_ACTUALRESULTS = 'actual-results'
# Sub-key listing the tests whose results did not match expectations.
JSONKEY_ACTUALRESULTS_FAILED = 'failed'
# This is the same indent level as used by jsoncpp, just for consistency.
JSON_INDENTLEVEL = 3
def Assert(filepath):
  """Raises an exception if the JSON summary at filepath contains any failed
  tests, or if we were unable to read the JSON summary."""
  failures = GetFailedTests(filepath)
  if not failures:
    return
  # Pretty-print the failure dictionary so the exception message is readable.
  pretty_failures = json.dumps(failures, indent=JSON_INDENTLEVEL)
  raise Exception('JSON file %s contained these test failures...\n%s' % (
      filepath, pretty_failures))
def GetFailedTests(filepath):
  """Returns the dictionary of failed tests from the JSON file at filepath.

  filepath: (string) path to a JSON summary written by GM

  Raises KeyError if the summary lacks the expected keys, or the usual
  IOError/ValueError if the file cannot be read or parsed.
  """
  # Use a context manager so the file handle is closed promptly instead of
  # being leaked until garbage collection (the old code never closed it).
  with open(filepath) as json_file:
    json_dict = json.load(json_file)
  actual_results = json_dict[JSONKEY_ACTUALRESULTS]
  return actual_results[JSONKEY_ACTUALRESULTS_FAILED]
if __name__ == '__main__':
  # Exactly one command-line argument is accepted: the JSON summary path.
  if len(sys.argv) != 2:
    raise Exception('usage: %s <input-json-filepath>' % sys.argv[0])
  Assert(sys.argv[1])

120
gm/display_json_results.py Normal file
View File

@ -0,0 +1,120 @@
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility to display a summary of JSON-format GM results, and exit with
a nonzero errorcode if there were non-ignored failures in the GM results.
Usage:
python display_json_results.py <filename>
TODO(epoger): We may want to add flags to set the following:
- which error types cause a nonzero return code
- maximum number of tests to list for any one ResultAccumulator
(to keep the output reasonably short)
"""
__author__ = 'Elliot Poger'
import json
import sys
# These constants must be kept in sync with the kJsonKey_ constants in
# gm_expectations.cpp !
# Top-level key under which GM records its per-test results.
JSONKEY_ACTUALRESULTS = 'actual-results'
# Sub-keys for each result category; Display() maps these to the
# ExpectationsMismatch / IgnoredExpectationsMismatch / MissingExpectations /
# Passed accumulators respectively.
JSONKEY_ACTUALRESULTS_FAILED = 'failed'
JSONKEY_ACTUALRESULTS_FAILUREIGNORED = 'failure-ignored'
JSONKEY_ACTUALRESULTS_NOCOMPARISON = 'no-comparison'
JSONKEY_ACTUALRESULTS_SUCCEEDED = 'succeeded'
class ResultAccumulator(object):
  """Object that accumulates results of a given type, and can generate a
  summary upon request."""

  def __init__(self, name, do_list, do_fail):
    """name: name of the category this result type falls into
    do_list: whether to list all of the tests with this result type
    do_fail: whether to return with nonzero exit code if there are any
             results of this type
    """
    self._name = name
    self._do_list = do_list
    self._do_fail = do_fail
    self._testnames = []

  def AddResult(self, testname):
    """Adds a result of this particular type.

    testname: (string) name of the test
    """
    self._testnames.append(testname)

  def ShouldSignalFailure(self):
    """Returns True if this result type is serious (self._do_fail is True)
    and there were any results of this type."""
    # bool() collapses the old four-line if/else that spelled out True/False.
    return bool(self._do_fail and self._testnames)

  def GetSummaryLine(self):
    """Returns a single-line string summary of all results added to this
    accumulator so far."""
    # '[*]' marks categories that will cause a nonzero exit code.
    marker = '[*] ' if self._do_fail else '[ ] '
    summary = '%s%d %s' % (marker, len(self._testnames), self._name)
    if self._do_list:
      # Single join instead of repeated += concatenation; each listed test
      # name keeps its trailing space to match the historical output format.
      summary += ': ' + ''.join(t + ' ' for t in self._testnames)
    return summary
def Display(filepath):
  """Displays a summary of the results in a JSON file.

  filepath: (string) path to JSON file written by GM

  Returns True if the results are free of any significant failures.
  """
  # Map labels within the JSON file to the ResultAccumulator for each label.
  results_map = {
      JSONKEY_ACTUALRESULTS_FAILED:
          ResultAccumulator(name='ExpectationsMismatch',
                            do_list=True, do_fail=True),
      JSONKEY_ACTUALRESULTS_FAILUREIGNORED:
          ResultAccumulator(name='IgnoredExpectationsMismatch',
                            do_list=True, do_fail=False),
      JSONKEY_ACTUALRESULTS_NOCOMPARISON:
          ResultAccumulator(name='MissingExpectations',
                            do_list=False, do_fail=False),
      JSONKEY_ACTUALRESULTS_SUCCEEDED:
          ResultAccumulator(name='Passed',
                            do_list=False, do_fail=False),
  }
  success = True
  # Context manager closes the file promptly instead of leaking the handle.
  with open(filepath) as json_file:
    json_dict = json.load(json_file)
  actual_results = json_dict[JSONKEY_ACTUALRESULTS]
  # items() rather than the Python-2-only iteritems(), and print() with a
  # single argument, keep this script runnable under both Python 2 and 3.
  for label, accumulator in results_map.items():
    results = actual_results[label]
    # The value may be JSON null (None) when there are no results of this
    # type; still print the (empty) summary line for the category.
    if results:
      for result in results:
        accumulator.AddResult(result)
    print(accumulator.GetSummaryLine())
    if accumulator.ShouldSignalFailure():
      success = False
  print('(results marked with [*] will cause nonzero return value)')
  return success
if __name__ == '__main__':
  # Exactly one command-line argument is accepted: the JSON summary path.
  if len(sys.argv) != 2:
    raise Exception('usage: %s <input-json-filepath>' % sys.argv[0])
  # Exit with 0 only when Display() reports no significant failures.
  exit_code = 0 if Display(sys.argv[1]) else 1
  sys.exit(exit_code)

View File

@ -12,7 +12,7 @@
#define DEBUGFAIL_SEE_STDERR SkDEBUGFAIL("see stderr for message")
// These constants must be kept in sync with the JSONKEY_ constants in
// confirm_no_failures_in_json.py !
// display_json_results.py !
const static char kJsonKey_ActualResults[] = "actual-results";
const static char kJsonKey_ActualResults_Failed[] = "failed";
const static char kJsonKey_ActualResults_FailureIgnored[]= "failure-ignored";

View File

@ -216,14 +216,14 @@ gm_test "--ignoreErrorTypes ExpectationsMismatch NoGpuContext --verbose --hierar
# Test non-hierarchical mode.
gm_test "--verbose --match selftest1 $CONFIGS -r $GM_INPUTS/json/different-pixels-no-hierarchy.json" "$GM_OUTPUTS/no-hierarchy"
# Exercise confirm_no_failures_in_json.py
# Exercise display_json_results.py
PASSING_CASES="compared-against-identical-bytes-json compared-against-identical-pixels-json"
FAILING_CASES="compared-against-different-pixels-json"
for CASE in $PASSING_CASES; do
assert_passes "python gm/confirm_no_failures_in_json.py $GM_OUTPUTS/$CASE/$OUTPUT_EXPECTED_SUBDIR/json-summary.txt"
assert_passes "python gm/display_json_results.py $GM_OUTPUTS/$CASE/$OUTPUT_EXPECTED_SUBDIR/json-summary.txt"
done
for CASE in $FAILING_CASES; do
assert_fails "python gm/confirm_no_failures_in_json.py $GM_OUTPUTS/$CASE/$OUTPUT_EXPECTED_SUBDIR/json-summary.txt"
assert_fails "python gm/display_json_results.py $GM_OUTPUTS/$CASE/$OUTPUT_EXPECTED_SUBDIR/json-summary.txt"
done
if [ $ENCOUNTERED_ANY_ERRORS == 0 ]; then