#!/usr/bin/python

"""
Copyright 2014 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.

A wrapper around the standard Python unittest library, adding features we need
for various unittests within this directory.

TODO(epoger): Move this into the common repo for broader use? Or at least in
a more common place within the Skia repo?
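
Example usage (hypothetical; the test class and paths below are illustrative,
and this file is assumed to be importable as base_unittest):

  import os
  import base_unittest

  class MyFeatureTest(base_unittest.TestCase):

    def __init__(self, *args, **kwargs):
      super(MyFeatureTest, self).__init__(*args, **kwargs)
      # Enable automatic comparison of outputs against expectations.
      self._testdata_dir = os.path.join(
          os.path.dirname(__file__), 'testdata')

    def test_writes_expected_output(self):
      # Anything written into self.output_dir_actual is compared against
      # self.output_dir_expected in tearDown().
      with open(os.path.join(self.output_dir_actual, 'out.txt'), 'w') as f:
        f.write('hello')

  def main():
    base_unittest.main(MyFeatureTest)

  if __name__ == '__main__':
    main()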
"""

import errno
import filecmp
import os
import shutil
import tempfile
import unittest

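# Absolute path two directory levels above this file: the root of the Skia
# trunk checkout.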
TRUNK_DIR = os.path.abspath(os.path.join(
    os.path.dirname(__file__), os.pardir, os.pardir))


class TestCase(unittest.TestCase):

  def __init__(self, *args, **kwargs):
    super(TestCase, self).__init__(*args, **kwargs)
    # Subclasses should override this default value if they want their output
    # to be automatically compared against expectations (see setUp and
    # tearDown).
    self._testdata_dir = None

  def setUp(self):
    """Called before each test."""
    # Get the name of this test, in such a way that it will be consistent
    # regardless of the directory it is run from (throw away package names,
    # if any).
    self._test_name = '.'.join(self.id().split('.')[-3:])

    self._temp_dir = tempfile.mkdtemp()
    if self._testdata_dir:
      self.create_empty_dir(self.output_dir_actual)

  def tearDown(self):
    """Called after each test."""
    shutil.rmtree(self._temp_dir)
    if self._testdata_dir and os.path.exists(self.output_dir_expected):
      different_files = _find_different_files(self.output_dir_actual,
                                              self.output_dir_expected)
      # Don't add any cleanup code below this assert!  If the test fails, we
      # want the differing output files left in place, so the tkdiff commands
      # printed in the assertion message can be run against them.
      assert (not different_files), \
          ('found differing files:\n' +
           '\n'.join(['tkdiff %s %s &' % (
               os.path.join(self.output_dir_actual, basename),
               os.path.join(self.output_dir_expected, basename))
                      for basename in different_files]))

  @property
  def temp_dir(self):
    return self._temp_dir

  @property
  def input_dir(self):
    assert self._testdata_dir, 'self._testdata_dir must be set'
    return os.path.join(self._testdata_dir, 'inputs')

  @property
  def output_dir_actual(self):
    assert self._testdata_dir, 'self._testdata_dir must be set'
    return os.path.join(
        self._testdata_dir, 'outputs', 'actual', self._test_name)

  @property
  def output_dir_expected(self):
    assert self._testdata_dir, 'self._testdata_dir must be set'
    return os.path.join(
        self._testdata_dir, 'outputs', 'expected', self._test_name)

  def shortDescription(self):
    """Tell unittest framework to not print docstrings for test cases."""
    return None

  def create_empty_dir(self, path):
    """Creates an empty directory at path and returns path.

    Args:
      path: path on local disk
    """
    # Delete the old one, if any.
    if os.path.isdir(path):
      shutil.rmtree(path=path, ignore_errors=True)
    elif os.path.lexists(path):
      os.remove(path)

    # Create the new one.
    try:
      os.makedirs(path)
    except OSError as exc:
      # Guard against race condition (somebody else is creating the same dir).
      if exc.errno != errno.EEXIST:
        raise
    return path


def _find_different_files(dir1, dir2, ignore_subtree_names=None):
  """Returns a list of any files that differ between the directory trees rooted
  at dir1 and dir2.

  Args:
    dir1: root of a directory tree; if nonexistent, will raise OSError
    dir2: root of another directory tree; if nonexistent, will raise OSError
    ignore_subtree_names: list of subtree directory names to ignore;
        defaults to ['.svn'], so all SVN files are ignored

  TODO(epoger): include the dirname within each filename (not just the
  basename), to make it easier to locate any differences.
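
  Example (hypothetical paths):
    different = _find_different_files('outputs/expected/foo',
                                      'outputs/actual/foo')
    # 'different' now holds the basenames of files that exist in only one of
    # the two trees, differ in content, or could not be compared.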
"""
  differing_files = []
  if ignore_subtree_names is None:
    ignore_subtree_names = ['.svn']
  dircmp = filecmp.dircmp(dir1, dir2, ignore=ignore_subtree_names)
  differing_files.extend(dircmp.left_only)
  differing_files.extend(dircmp.right_only)
  differing_files.extend(dircmp.common_funny)
  differing_files.extend(dircmp.diff_files)
  differing_files.extend(dircmp.funny_files)
  for common_dir in dircmp.common_dirs:
    # Recurse into common subdirectories, propagating the ignore list.
    differing_files.extend(_find_different_files(
        os.path.join(dir1, common_dir), os.path.join(dir2, common_dir),
        ignore_subtree_names))
  return differing_files


def main(test_case_class):
  """Run the unit tests within the given class.

  Raises an Exception if any of those tests fail (in case we are running in the
  context of run_all.py, which depends on that Exception to signal failures).
  """
  suite = unittest.TestLoader().loadTestsFromTestCase(test_case_class)
  results = unittest.TextTestRunner(verbosity=2).run(suite)
  if not results.wasSuccessful():
    raise Exception('failed unittest %s' % test_case_class)