reland "rebaseline_server: download actual-results.json files from GCS instead of SVN"
relands https://codereview.chromium.org/310093003 with modifications. BUG=skia:2641 R=jcgregorio@google.com Author: epoger@google.com Review URL: https://codereview.chromium.org/313343003
parent 9de2fb680f
commit b144271179
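At a high level, this change swaps an SVN checkout of the skia-autogen repo
for direct reads from a Google Storage bucket. A minimal sketch of the new
flow, using the helpers this commit adds (a sketch, not part of the diff; it
assumes a Skia checkout on disk and public read access to the bucket;
'.gm-actuals' is the default actuals directory defined in results.py):

    import fix_pythonpath  # must do this first
    from pyutils import gs_utils
    import download_actuals
    import os
    import posixpath

    # Ask GCS which builders have uploaded result summaries.
    builders = download_actuals.get_builders_list()

    # Mirror each builder's JSON summary into a local actuals dir, much as
    # server.py's polling loop now does.
    for builder in builders:
      gs_utils.download_file(
          source_bucket=download_actuals.GM_SUMMARIES_BUCKET,
          source_path=posixpath.join(
              builder, download_actuals.DEFAULT_JSON_FILENAME),
          dest_path=os.path.join(
              '.gm-actuals', builder, download_actuals.DEFAULT_JSON_FILENAME),
          create_subdirs_if_needed=True)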

DEPS
@@ -8,7 +8,6 @@ deps = {
   "third_party/externals/angle" : "https://chromium.googlesource.com/external/angleproject.git",
   "third_party/externals/angle2" : "https://chromium.googlesource.com/angle/angle.git@bdc9b2f0ed9e365bf5a4d19799d93a512f07dd32",
   "third_party/externals/freetype" : "https://skia.googlesource.com/third_party/freetype2.git@VER-2-5-0-1",
-  "third_party/externals/google-api-python-client" : "https://github.com/google/google-api-python-client.git@56557e2c1d2cbce0d2de26e3a7f32f836b8f5eb2",
   "third_party/externals/gyp" : "https://chromium.googlesource.com/external/gyp.git@11e243c9fd625728c086c264d5ca85cc786ddf82",
   "third_party/externals/libjpeg" : "https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@82ce8a6d4ebe12a177c0c3597192f2b4f09e81c3",
   "third_party/externals/jsoncpp" : "https://chromium.googlesource.com/external/jsoncpp/jsoncpp.git@ab1e40f3bce061ea6f9bdc60351d6cde2a4f872b",

gm/rebaseline_server/compare_configs.py
@@ -14,28 +14,12 @@ import argparse
 import fnmatch
 import json
 import logging
-import os
 import re
-import sys
 import time

 # Imports from within Skia
-#
-# TODO(epoger): Once we move the create_filepath_url() function out of
-# download_actuals into a shared utility module, we won't need to import
-# download_actuals anymore.
-#
-# We need to add the 'gm' directory, so that we can import gm_json.py within
-# that directory. That script allows us to parse the actual-results.json file
-# written out by the GM tool.
-# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
-# so any dirs that are already in the PYTHONPATH will be preferred.
-PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
-GM_DIRECTORY = os.path.dirname(PARENT_DIRECTORY)
-TRUNK_DIRECTORY = os.path.dirname(GM_DIRECTORY)
-if GM_DIRECTORY not in sys.path:
-  sys.path.append(GM_DIRECTORY)
-import download_actuals
+import fix_pythonpath  # must do this first
+from pyutils import url_utils
 import gm_json
 import imagediffdb
 import imagepair
@@ -71,7 +55,7 @@ class ConfigComparisons(results.BaseComparisons):
     self._image_diff_db = imagediffdb.ImageDiffDB(generated_images_root)
     self._diff_base_url = (
         diff_base_url or
-        download_actuals.create_filepath_url(generated_images_root))
+        url_utils.create_filepath_url(generated_images_root))
     self._actuals_root = actuals_root
     self._load_config_pairs(configs)
     self._timestamp = int(time.time())

gm/rebaseline_server/compare_rendered_pictures.py
@@ -13,26 +13,11 @@ Compare results of two render_pictures runs.
 import logging
 import os
 import re
-import sys
 import time

 # Imports from within Skia
-#
-# TODO(epoger): Once we move the create_filepath_url() function out of
-# download_actuals into a shared utility module, we won't need to import
-# download_actuals anymore.
-#
-# We need to add the 'gm' directory, so that we can import gm_json.py within
-# that directory. That script allows us to parse the actual-results.json file
-# written out by the GM tool.
-# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
-# so any dirs that are already in the PYTHONPATH will be preferred.
-PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
-GM_DIRECTORY = os.path.dirname(PARENT_DIRECTORY)
-TRUNK_DIRECTORY = os.path.dirname(GM_DIRECTORY)
-if GM_DIRECTORY not in sys.path:
-  sys.path.append(GM_DIRECTORY)
-import download_actuals
+import fix_pythonpath  # must do this first
+from pyutils import url_utils
 import gm_json
 import imagediffdb
 import imagepair
@@ -74,7 +59,7 @@ class RenderedPicturesComparisons(results.BaseComparisons):
     self._image_base_url = image_base_url
     self._diff_base_url = (
         diff_base_url or
-        download_actuals.create_filepath_url(generated_images_root))
+        url_utils.create_filepath_url(generated_images_root))
     self._load_result_pairs(actuals_root, subdirs)
     self._timestamp = int(time.time())
     logging.info('Results complete; took %d seconds.' %

gm/rebaseline_server/compare_to_expectations.py
@@ -20,22 +20,8 @@ import sys
 import time

 # Imports from within Skia
-#
-# TODO(epoger): Once we move the create_filepath_url() function out of
-# download_actuals into a shared utility module, we won't need to import
-# download_actuals anymore.
-#
-# We need to add the 'gm' directory, so that we can import gm_json.py within
-# that directory. That script allows us to parse the actual-results.json file
-# written out by the GM tool.
-# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
-# so any dirs that are already in the PYTHONPATH will be preferred.
-PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
-GM_DIRECTORY = os.path.dirname(PARENT_DIRECTORY)
-TRUNK_DIRECTORY = os.path.dirname(GM_DIRECTORY)
-if GM_DIRECTORY not in sys.path:
-  sys.path.append(GM_DIRECTORY)
-import download_actuals
+import fix_pythonpath  # must do this first
+from pyutils import url_utils
 import gm_json
 import imagediffdb
 import imagepair
@@ -47,6 +33,7 @@ EXPECTATION_FIELDS_PASSED_THRU_VERBATIM = [
     results.KEY__EXPECTATIONS__IGNOREFAILURE,
     results.KEY__EXPECTATIONS__REVIEWED,
 ]
+TRUNK_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
 DEFAULT_EXPECTATIONS_DIR = os.path.join(TRUNK_DIRECTORY, 'expectations', 'gm')
 DEFAULT_IGNORE_FAILURES_FILE = 'ignored-tests.txt'

@@ -88,7 +75,7 @@ class ExpectationComparisons(results.BaseComparisons):
     self._image_diff_db = imagediffdb.ImageDiffDB(generated_images_root)
     self._diff_base_url = (
         diff_base_url or
-        download_actuals.create_filepath_url(generated_images_root))
+        url_utils.create_filepath_url(generated_images_root))
     self._actuals_root = actuals_root
     self._expected_root = expected_root
     self._ignore_failures_on_these_tests = []

gm/rebaseline_server/download_actuals.py
@@ -10,44 +10,19 @@ Download actual GM results for a particular builder.
 """

 # System-level imports
-import contextlib
 import optparse
 import os
 import posixpath
 import re
-import shutil
-import sys
-import urllib
 import urllib2
-import urlparse

 # Imports from within Skia
-#
-# We need to add the 'gm' and 'tools' directories, so that we can import
-# gm_json.py and buildbot_globals.py.
-#
-# Make sure that these dirs are in the PYTHONPATH, but add them at the *end*
-# so any dirs that are already in the PYTHONPATH will be preferred.
-#
-# TODO(epoger): Is it OK for this to depend on the 'tools' dir, given that
-# the tools dir is dependent on the 'gm' dir (to import gm_json.py)?
-TRUNK_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
-GM_DIRECTORY = os.path.join(TRUNK_DIRECTORY, 'gm')
-TOOLS_DIRECTORY = os.path.join(TRUNK_DIRECTORY, 'tools')
-if GM_DIRECTORY not in sys.path:
-  sys.path.append(GM_DIRECTORY)
-if TOOLS_DIRECTORY not in sys.path:
-  sys.path.append(TOOLS_DIRECTORY)
+import fix_pythonpath  # must do this first
+from pyutils import gs_utils
+from pyutils import url_utils
 import buildbot_globals
 import gm_json

-# Imports from third-party code
-APICLIENT_DIRECTORY = os.path.join(
-    TRUNK_DIRECTORY, 'third_party', 'externals', 'google-api-python-client')
-if APICLIENT_DIRECTORY not in sys.path:
-  sys.path.append(APICLIENT_DIRECTORY)
-from googleapiclient.discovery import build as build_service
-

 GM_SUMMARIES_BUCKET = buildbot_globals.Get('gm_summaries_bucket')
 DEFAULT_ACTUALS_BASE_URL = (
@@ -105,98 +80,19 @@ class Download(object):
             test_name=test, hash_type=hash_type, hash_digest=hash_digest,
             gm_actuals_root_url=self._gm_actuals_root_url)
         dest_path = os.path.join(dest_dir, config, test + '.png')
-        # TODO(epoger): To speed this up, we should only download files that
-        # we don't already have on local disk.
-        copy_contents(source_url=source_url, dest_path=dest_path,
-                      create_subdirs_if_needed=True)
+        url_utils.copy_contents(source_url=source_url, dest_path=dest_path,
+                                create_subdirs_if_needed=True)


-def create_filepath_url(filepath):
-  """ Returns a file:/// URL pointing at the given filepath on local disk.
-
-  For now, this is only used by unittests, but I anticipate it being useful
-  in production, as a way for developers to run rebaseline_server over locally
-  generated images.
-
-  TODO(epoger): Move this function, and copy_contents(), into a shared
-  utility module. They are generally useful.
-
-  Args:
-    filepath: string; path to a file on local disk (may be absolute or relative,
-        and the file does not need to exist)
-
-  Returns:
-    A file:/// URL pointing at the file. Regardless of whether filepath was
-    specified as a relative or absolute path, the URL will contain an
-    absolute path to the file.
-
-  Raises:
-    An Exception, if filepath is already a URL.
-  """
-  if urlparse.urlparse(filepath).scheme:
-    raise Exception('"%s" is already a URL' % filepath)
-  return urlparse.urljoin(
-      'file:', urllib.pathname2url(os.path.abspath(filepath)))
-
-
-def copy_contents(source_url, dest_path, create_subdirs_if_needed=False):
-  """ Copies the full contents of the URL 'source_url' into
-  filepath 'dest_path'.
-
-  Args:
-    source_url: string; complete URL to read from
-    dest_path: string; complete filepath to write to (may be absolute or
-        relative)
-    create_subdirs_if_needed: boolean; whether to create subdirectories as
-        needed to create dest_path
-
-  Raises:
-    Some subclass of Exception if unable to read source_url or write dest_path.
-  """
-  if create_subdirs_if_needed:
-    dest_dir = os.path.dirname(dest_path)
-    if not os.path.exists(dest_dir):
-      os.makedirs(dest_dir)
-  with contextlib.closing(urllib.urlopen(source_url)) as source_handle:
-    with open(dest_path, 'wb') as dest_handle:
-      shutil.copyfileobj(fsrc=source_handle, fdst=dest_handle)
-
-
-def gcs_list_bucket_contents(bucket, subdir=None):
-  """ Returns files in the Google Cloud Storage bucket as a (dirs, files) tuple.
-
-  Uses the API documented at
-  https://developers.google.com/storage/docs/json_api/v1/objects/list
-
-  Args:
-    bucket: name of the Google Storage bucket
-    subdir: directory within the bucket to list, or None for root directory
-  """
-  # The GCS command relies on the subdir name (if any) ending with a slash.
-  if subdir and not subdir.endswith('/'):
-    subdir += '/'
-  subdir_length = len(subdir) if subdir else 0
-
-  storage = build_service('storage', 'v1')
-  command = storage.objects().list(
-      bucket=bucket, delimiter='/', fields='items(name),prefixes',
-      prefix=subdir)
-  results = command.execute()
-
-  # The GCS command returned two subdicts:
-  # prefixes: the full path of every directory within subdir, with trailing '/'
-  # items: property dict for each file object within subdir
-  #   (including 'name', which is full path of the object)
-  dirs = []
-  for dir_fullpath in results.get('prefixes', []):
-    dir_basename = dir_fullpath[subdir_length:]
-    dirs.append(dir_basename[:-1])  # strip trailing slash
-  files = []
-  for file_properties in results.get('items', []):
-    file_fullpath = file_properties['name']
-    file_basename = file_fullpath[subdir_length:]
-    files.append(file_basename)
-  return (dirs, files)
+def get_builders_list(summaries_bucket=GM_SUMMARIES_BUCKET):
+  """ Returns the list of builders we have actual results for.
+
+  Args:
+    summaries_bucket: Google Cloud Storage bucket containing the summary
+        JSON files
+  """
+  dirs, _ = gs_utils.list_bucket_contents(bucket=GM_SUMMARIES_BUCKET)
+  return dirs


 def main():
@@ -234,8 +130,7 @@ def main():
   (params, remaining_args) = parser.parse_args()

   if params.list_builders:
-    dirs, _ = gcs_list_bucket_contents(bucket=GM_SUMMARIES_BUCKET)
-    print '\n'.join(dirs)
+    print '\n'.join(get_builders_list())
     return

   # Make sure all required options were set,
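With gcs_list_bucket_contents() folded into gs_utils, listing builders is now
a single call to the shared helper. The same query done programmatically (a
sketch; assumes a Skia checkout, and prints what --list-builders prints):

    import fix_pythonpath  # must do this first
    import download_actuals

    for builder in download_actuals.get_builders_list():
      print builder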

gm/rebaseline_server/download_actuals_test.py
@@ -25,6 +25,8 @@ import tempfile
 import urllib

 # Imports from within Skia
+import fix_pythonpath  # must do this first
+from pyutils import url_utils
 import base_unittest
 import download_actuals

@@ -34,52 +36,14 @@ class DownloadTest(base_unittest.TestCase):
   def test_fetch(self):
     """Tests fetch() of GM results from actual-results.json ."""
     downloader = download_actuals.Download(
-        actuals_base_url=download_actuals.create_filepath_url(
+        actuals_base_url=url_utils.create_filepath_url(
             os.path.join(self._input_dir, 'gm-actuals')),
-        gm_actuals_root_url=download_actuals.create_filepath_url(
+        gm_actuals_root_url=url_utils.create_filepath_url(
             os.path.join(self._input_dir, 'fake-gm-imagefiles')))
     downloader.fetch(
         builder_name='Test-Android-GalaxyNexus-SGX540-Arm7-Release',
         dest_dir=self._output_dir_actual)

-  def test_create_filepath_url(self):
-    """Tests create_filepath_url(). """
-    with self.assertRaises(Exception):
-      url_or_path.create_filepath_url('http://1.2.3.4/path')
-    # Pass absolute filepath.
-    self.assertEquals(
-        download_actuals.create_filepath_url(
-            '%sdir%sfile' % (os.path.sep, os.path.sep)),
-        'file:///dir/file')
-    # Pass relative filepath.
-    self.assertEquals(
-        download_actuals.create_filepath_url(os.path.join('dir', 'file')),
-        'file://%s/dir/file' % urllib.pathname2url(os.getcwd()))
-
-  def test_copy_contents(self):
-    """Tests copy_contents(). """
-    contents = 'these are the contents'
-    tempdir_path = tempfile.mkdtemp()
-    try:
-      source_path = os.path.join(tempdir_path, 'source')
-      source_url = download_actuals.create_filepath_url(source_path)
-      with open(source_path, 'w') as source_handle:
-        source_handle.write(contents)
-      dest_path = os.path.join(tempdir_path, 'new_subdir', 'dest')
-      # Destination subdir does not exist, so copy_contents() should fail
-      # if create_subdirs_if_needed is False.
-      with self.assertRaises(Exception):
-        download_actuals.copy_contents(source_url=source_url,
-                                       dest_path=dest_path,
-                                       create_subdirs_if_needed=False)
-      # If create_subdirs_if_needed is True, it should work.
-      download_actuals.copy_contents(source_url=source_url,
-                                     dest_path=dest_path,
-                                     create_subdirs_if_needed=True)
-      self.assertEquals(open(dest_path).read(), contents)
-    finally:
-      shutil.rmtree(tempdir_path)
-

 def main():
   base_unittest.main(DownloadTest)

gm/rebaseline_server/fix_pythonpath.py (new executable file)
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+
+"""
+Copyright 2014 Google Inc.
+
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+Adds [trunk]/gm and [trunk]/tools to PYTHONPATH, if they aren't already there.
+"""
+
+import os
+import sys
+
+TRUNK_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+GM_DIRECTORY = os.path.join(TRUNK_DIRECTORY, 'gm')
+TOOLS_DIRECTORY = os.path.join(TRUNK_DIRECTORY, 'tools')
+if GM_DIRECTORY not in sys.path:
+  sys.path.append(GM_DIRECTORY)
+if TOOLS_DIRECTORY not in sys.path:
+  sys.path.append(TOOLS_DIRECTORY)
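This tiny module is what lets every rebaseline_server script above drop its
hand-rolled sys.path manipulation: importing it mutates sys.path as a side
effect, after which modules in gm/ and tools/ (including the pyutils package)
resolve. The idiom, as used throughout this commit:

    import fix_pythonpath  # must do this first; adds [trunk]/gm and [trunk]/tools
    from pyutils import gs_utils
    from pyutils import url_utils
    import gm_json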

gm/rebaseline_server/results.py
@@ -13,19 +13,9 @@ Repackage expected/actual GM results as needed by our HTML rebaseline viewer.
 import fnmatch
 import os
 import re
-import sys

 # Imports from within Skia
-#
-# We need to add the 'gm' directory, so that we can import gm_json.py within
-# that directory. That script allows us to parse the actual-results.json file
-# written out by the GM tool.
-# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
-# so any dirs that are already in the PYTHONPATH will be preferred.
-PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
-GM_DIRECTORY = os.path.dirname(PARENT_DIRECTORY)
-if GM_DIRECTORY not in sys.path:
-  sys.path.append(GM_DIRECTORY)
+import fix_pythonpath  # must do this first
 import gm_json
 import imagepairset

@@ -57,6 +47,7 @@ KEY__RESULT_TYPE__SUCCEEDED = gm_json.JSONKEY_ACTUALRESULTS_SUCCEEDED
 IMAGE_FILENAME_RE = re.compile(gm_json.IMAGE_FILENAME_PATTERN)
 IMAGE_FILENAME_FORMATTER = '%s_%s.png'  # pass in (testname, config)

+PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
 DEFAULT_ACTUALS_DIR = '.gm-actuals'
 DEFAULT_GENERATED_IMAGES_ROOT = os.path.join(
     PARENT_DIRECTORY, '.generated-images')

gm/rebaseline_server/server.py
@@ -20,28 +20,14 @@ import re
 import shutil
 import socket
 import subprocess
-import sys
 import thread
 import threading
 import time
 import urlparse

 # Imports from within Skia
-#
-# We need to add the 'tools' directory for svn.py, and the 'gm' directory for
-# gm_json.py .
-# that directory.
-# Make sure that the 'tools' dir is in the PYTHONPATH, but add it at the *end*
-# so any dirs that are already in the PYTHONPATH will be preferred.
-PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
-GM_DIRECTORY = os.path.dirname(PARENT_DIRECTORY)
-TRUNK_DIRECTORY = os.path.dirname(GM_DIRECTORY)
-TOOLS_DIRECTORY = os.path.join(TRUNK_DIRECTORY, 'tools')
-if TOOLS_DIRECTORY not in sys.path:
-  sys.path.append(TOOLS_DIRECTORY)
-import svn
-if GM_DIRECTORY not in sys.path:
-  sys.path.append(GM_DIRECTORY)
+import fix_pythonpath  # must do this first
+from pyutils import gs_utils
 import gm_json

 # Imports from local dir
@@ -51,6 +37,7 @@ import gm_json
 # https://codereview.chromium.org/195943004/diff/1/gm/rebaseline_server/server.py#newcode44
 import compare_configs
 import compare_to_expectations
+import download_actuals
 import imagepairset
 import results as results_mod

@@ -74,10 +61,12 @@ KEY__EDITS__OLD_RESULTS_HASH = 'oldResultsHash'
 KEY__EDITS__OLD_RESULTS_TYPE = 'oldResultsType'

 DEFAULT_ACTUALS_DIR = results_mod.DEFAULT_ACTUALS_DIR
-DEFAULT_ACTUALS_REPO_REVISION = 'HEAD'
-DEFAULT_ACTUALS_REPO_URL = 'http://skia-autogen.googlecode.com/svn/gm-actual'
+DEFAULT_GM_SUMMARIES_BUCKET = download_actuals.GM_SUMMARIES_BUCKET
+DEFAULT_JSON_FILENAME = download_actuals.DEFAULT_JSON_FILENAME
 DEFAULT_PORT = 8888

+PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
+TRUNK_DIRECTORY = os.path.dirname(os.path.dirname(PARENT_DIRECTORY))
 # Directory, relative to PARENT_DIRECTORY, within which the server will serve
 # out live results (not static files).
 RESULTS_SUBDIR = 'results'
@@ -139,24 +128,6 @@ def _get_routable_ip_address():
   return host


-def _create_svn_checkout(dir_path, repo_url):
-  """Creates local checkout of an SVN repository at the specified directory
-  path, returning an svn.Svn object referring to the local checkout.
-
-  Args:
-    dir_path: path to the local checkout; if this directory does not yet exist,
-        it will be created and the repo will be checked out into it
-    repo_url: URL of SVN repo to check out into dir_path (unless the local
-        checkout already exists)
-
-  Returns: an svn.Svn object referring to the local checkout.
-  """
-  local_checkout = svn.Svn(dir_path)
-  if not os.path.isdir(dir_path):
-    os.makedirs(dir_path)
-    local_checkout.Checkout(repo_url, '.')
-  return local_checkout
-
-
 def _create_index(file_path, config_pairs):
   """Creates an index file linking to all results available from this server.
@@ -213,18 +184,18 @@ class Server(object):

   def __init__(self,
                actuals_dir=DEFAULT_ACTUALS_DIR,
-               actuals_repo_revision=DEFAULT_ACTUALS_REPO_REVISION,
-               actuals_repo_url=DEFAULT_ACTUALS_REPO_URL,
+               json_filename=DEFAULT_JSON_FILENAME,
+               gm_summaries_bucket=DEFAULT_GM_SUMMARIES_BUCKET,
                port=DEFAULT_PORT, export=False, editable=True,
                reload_seconds=0, config_pairs=None, builder_regex_list=None):
     """
     Args:
       actuals_dir: directory under which we will check out the latest actual
           GM results
-      actuals_repo_revision: revision of actual-results.json files to process
-      actuals_repo_url: SVN repo to download actual-results.json files from;
-          if None or '', don't fetch new actual-results files at all,
-          just compare to whatever files are already in actuals_dir
+      json_filename: basename of the JSON summary file to load for each builder
+      gm_summaries_bucket: Google Storage bucket to download json_filename
+          files from; if None or '', don't fetch new actual-results files
+          at all, just compare to whatever files are already in actuals_dir
       port: which TCP port to listen on for HTTP requests
       export: whether to allow HTTP clients on other hosts to access this server
       editable: whether HTTP clients are allowed to submit new baselines
@@ -237,8 +208,8 @@ class Server(object):
           we will process. If None, process all builders.
     """
     self._actuals_dir = actuals_dir
-    self._actuals_repo_revision = actuals_repo_revision
-    self._actuals_repo_url = actuals_repo_url
+    self._json_filename = json_filename
+    self._gm_summaries_bucket = gm_summaries_bucket
     self._port = port
     self._export = export
     self._editable = editable
@@ -250,11 +221,6 @@ class Server(object):
             PARENT_DIRECTORY, STATIC_CONTENTS_SUBDIR, GENERATED_HTML_SUBDIR,
             "index.html"),
         config_pairs=config_pairs)
-    # TODO(epoger): Create shareable functions within download_actuals.py that
-    # we can use both there and here to download the actual image results.
-    if actuals_repo_url:
-      self._actuals_repo = _create_svn_checkout(
-          dir_path=actuals_dir, repo_url=actuals_repo_url)

     # Reentrant lock that must be held whenever updating EITHER of:
     # 1. self._results
@@ -302,26 +268,66 @@ class Server(object):
     with self.results_rlock:
       if invalidate:
         self._results = None
-      if self._actuals_repo_url:
+      if self._gm_summaries_bucket:
         logging.info(
-            'Updating actual GM results in %s to revision %s from repo %s ...'
-            % (
-                self._actuals_dir, self._actuals_repo_revision,
-                self._actuals_repo_url))
-        self._actuals_repo.Update(
-            path='.', revision=self._actuals_repo_revision)
+            'Updating GM result summaries in %s from gm_summaries_bucket %s ...'
+            % (self._actuals_dir, self._gm_summaries_bucket))
+
+        # Clean out actuals_dir first, in case some builders have gone away
+        # since we last ran.
+        if os.path.isdir(self._actuals_dir):
+          shutil.rmtree(self._actuals_dir)
+
+        # Get the list of builders we care about.
+        all_builders = download_actuals.get_builders_list(
+            summaries_bucket=self._gm_summaries_bucket)
+        if self._builder_regex_list:
+          matching_builders = []
+          for builder in all_builders:
+            for regex in self._builder_regex_list:
+              if re.match(regex, builder):
+                matching_builders.append(builder)
+                break  # go on to the next builder, no need to try more regexes
+        else:
+          matching_builders = all_builders
+
+        # Download the JSON file for each builder we care about.
+        #
+        # TODO(epoger): When this is a large number of builders, we would be
+        # better off downloading them in parallel!
+        for builder in matching_builders:
+          gs_utils.download_file(
+              source_bucket=self._gm_summaries_bucket,
+              source_path=posixpath.join(builder, self._json_filename),
+              dest_path=os.path.join(self._actuals_dir, builder,
+                                     self._json_filename),
+              create_subdirs_if_needed=True)

       # We only update the expectations dir if the server was run with a
       # nonzero --reload argument; otherwise, we expect the user to maintain
       # her own expectations as she sees fit.
       #
-      # Because the Skia repo is moving from SVN to git, and git does not
+      # Because the Skia repo is hosted using git, and git does not
       # support updating a single directory tree, we have to update the entire
       # repo checkout.
       #
       # Because Skia uses depot_tools, we have to update using "gclient sync"
-      # instead of raw git (or SVN) update. Happily, this will work whether
-      # the checkout was created using git or SVN.
+      # instead of raw git commands.
+      #
+      # TODO(epoger): Fetch latest expectations in some other way.
+      # Eric points out that our official documentation recommends an
+      # unmanaged Skia checkout, so "gclient sync" will not bring down updated
+      # expectations from origin/master-- you'd have to do a "git pull" of
+      # some sort instead.
+      # However, the live rebaseline_server at
+      # http://skia-tree-status.appspot.com/redirect/rebaseline-server (which
+      # is probably the only user of the --reload flag!) uses a managed
+      # checkout, so "gclient sync" works in that case.
+      # Probably the best idea is to avoid all of this nonsense by fetching
+      # updated expectations into a temp directory, and leaving the rest of
+      # the checkout alone.  This could be done using "git show", or by
+      # downloading individual expectation JSON files from
+      # skia.googlesource.com .
       if self._reload_seconds:
         logging.info(
             'Updating expected GM results in %s by syncing Skia repo ...' %
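The builder-filtering step in the loop above is worth reading on its own. An
equivalent standalone sketch (the function name is hypothetical; the behavior
matches the code above):

    import re

    def filter_builders(all_builders, builder_regex_list):
      """Keep builders matching at least one regex; keep all if no regexes."""
      if not builder_regex_list:
        return list(all_builders)
      matching_builders = []
      for builder in all_builders:
        for regex in builder_regex_list:
          if re.match(regex, builder):
            matching_builders.append(builder)
            break  # this builder matched; no need to try more regexes
      return matching_builders

Note that re.match() anchors at the start of the name only, so a pattern like
'Test-Android' selects every builder whose name begins with that prefix.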
@@ -623,18 +629,11 @@ def main():
                           'actual GM results. If this directory does not '
                           'exist, it will be created. Defaults to %(default)s'),
                       default=DEFAULT_ACTUALS_DIR)
-  parser.add_argument('--actuals-repo',
-                      help=('URL of SVN repo to download actual-results.json '
-                            'files from. Defaults to %(default)s ; if set to '
-                            'empty string, just compare to actual-results '
-                            'already found in ACTUALS_DIR.'),
-                      default=DEFAULT_ACTUALS_REPO_URL)
-  parser.add_argument('--actuals-revision',
-                      help=('revision of actual-results.json files to process. '
-                            'Defaults to %(default)s . Beware of setting this '
-                            'argument in conjunction with --editable; you '
-                            'probably only want to edit results at HEAD.'),
-                      default=DEFAULT_ACTUALS_REPO_REVISION)
+  # TODO(epoger): Before https://codereview.chromium.org/310093003 ,
+  # when this tool downloaded the JSON summaries from skia-autogen,
+  # it had an --actuals-revision the caller could specify to download
+  # actual results as of a specific point in time. We should add similar
+  # functionality when retrieving the summaries from Google Storage.
   parser.add_argument('--builders', metavar='BUILDER_REGEX', nargs='+',
                       help=('Only process builders matching these regular '
                             'expressions. If unspecified, process all '

@@ -652,6 +651,17 @@ def main():
                           'to access this server. WARNING: doing so will '
                           'allow users on other hosts to modify your '
                           'GM expectations, if combined with --editable.'))
+  parser.add_argument('--gm-summaries-bucket',
+                      help=('Google Cloud Storage bucket to download '
+                            'JSON_FILENAME files from. '
+                            'Defaults to %(default)s ; if set to '
+                            'empty string, just compare to actual-results '
+                            'already found in ACTUALS_DIR.'),
+                      default=DEFAULT_GM_SUMMARIES_BUCKET)
+  parser.add_argument('--json-filename',
+                      help=('JSON summary filename to read for each builder; '
+                            'defaults to %(default)s.'),
+                      default=DEFAULT_JSON_FILENAME)
   parser.add_argument('--port', type=int,
                       help=('Which TCP port to listen on for HTTP requests; '
                             'defaults to %(default)s'),

@@ -672,8 +682,8 @@ def main():

   global _SERVER
   _SERVER = Server(actuals_dir=args.actuals_dir,
-                   actuals_repo_revision=args.actuals_revision,
-                   actuals_repo_url=args.actuals_repo,
+                   json_filename=args.json_filename,
+                   gm_summaries_bucket=args.gm_summaries_bucket,
                    port=args.port, export=args.export, editable=args.editable,
                    reload_seconds=args.reload, config_pairs=config_pairs,
                    builder_regex_list=args.builders)
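For reference, a sketch of constructing the server with the new parameters in
place of the old SVN ones (values illustrative; assumes it runs from
gm/rebaseline_server/ so the module imports resolve):

    import server

    _SERVER = server.Server(
        actuals_dir=server.DEFAULT_ACTUALS_DIR,
        json_filename=server.DEFAULT_JSON_FILENAME,
        gm_summaries_bucket=server.DEFAULT_GM_SUMMARIES_BUCKET,
        port=server.DEFAULT_PORT, export=False, editable=True,
        reload_seconds=0, config_pairs=None,
        builder_regex_list=['Test-Android-.*'])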

tools/pyutils/__init__.py (new empty file)

tools/pyutils/gs_utils.py (new executable file)
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+
+"""
+Copyright 2014 Google Inc.
+
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+Utilities for accessing Google Cloud Storage.
+
+TODO(epoger): move this into tools/utils for broader use?
+"""
+
+# System-level imports
+import os
+import posixpath
+import sys
+try:
+  from apiclient.discovery import build as build_service
+except ImportError:
+  print ('Missing google-api-python-client. Please install it; directions '
+         'can be found at https://developers.google.com/api-client-library/'
+         'python/start/installation')
+  raise
+
+# Local imports
+import url_utils
+
+
+def download_file(source_bucket, source_path, dest_path,
+                  create_subdirs_if_needed=False):
+  """ Downloads a single file from Google Cloud Storage to local disk.
+
+  Args:
+    source_bucket: GCS bucket to download the file from
+    source_path: full path (Posix-style) within that bucket
+    dest_path: full path (local-OS-style) on local disk to copy the file to
+    create_subdirs_if_needed: boolean; whether to create subdirectories as
+        needed to create dest_path
+  """
+  source_http_url = posixpath.join(
+      'http://storage.googleapis.com', source_bucket, source_path)
+  url_utils.copy_contents(source_url=source_http_url, dest_path=dest_path,
+                          create_subdirs_if_needed=create_subdirs_if_needed)
+
+
+def list_bucket_contents(bucket, subdir=None):
+  """ Returns files in the Google Cloud Storage bucket as a (dirs, files) tuple.
+
+  Uses the API documented at
+  https://developers.google.com/storage/docs/json_api/v1/objects/list
+
+  Args:
+    bucket: name of the Google Storage bucket
+    subdir: directory within the bucket to list, or None for root directory
+  """
+  # The GCS command relies on the subdir name (if any) ending with a slash.
+  if subdir and not subdir.endswith('/'):
+    subdir += '/'
+  subdir_length = len(subdir) if subdir else 0
+
+  storage = build_service('storage', 'v1')
+  command = storage.objects().list(
+      bucket=bucket, delimiter='/', fields='items(name),prefixes',
+      prefix=subdir)
+  results = command.execute()
+
+  # The GCS command returned two subdicts:
+  # prefixes: the full path of every directory within subdir, with trailing '/'
+  # items: property dict for each file object within subdir
+  #   (including 'name', which is full path of the object)
+  dirs = []
+  for dir_fullpath in results.get('prefixes', []):
+    dir_basename = dir_fullpath[subdir_length:]
+    dirs.append(dir_basename[:-1])  # strip trailing slash
+  files = []
+  for file_properties in results.get('items', []):
+    file_fullpath = file_properties['name']
+    file_basename = file_fullpath[subdir_length:]
+    files.append(file_basename)
+  return (dirs, files)
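A quick usage sketch for the two helpers above; the bucket name and paths are
illustrative placeholders, not values from this commit:

    import gs_utils

    # List the immediate subdirectories and files at the root of the bucket.
    dirs, files = gs_utils.list_bucket_contents(bucket='example-bucket')
    print dirs, files

    # Copy one object out of the bucket onto local disk.
    gs_utils.download_file(
        source_bucket='example-bucket',
        source_path='some-subdir/summary.json',
        dest_path='/tmp/summaries/summary.json',
        create_subdirs_if_needed=True)

Note the asymmetry: download_file() reads over plain HTTP from
storage.googleapis.com (so the object must be publicly readable), while
list_bucket_contents() goes through the google-api-python-client JSON API,
which is why the guarded import at the top of the file is needed at all.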

tools/pyutils/url_utils.py (new executable file)
@@ -0,0 +1,63 @@
+#!/usr/bin/python
+
+"""
+Copyright 2014 Google Inc.
+
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+Utilities for working with URLs.
+
+TODO(epoger): move this into tools/utils for broader use?
+"""
+
+# System-level imports
+import contextlib
+import os
+import shutil
+import urllib
+import urlparse
+
+
+def create_filepath_url(filepath):
+  """ Returns a file:/// URL pointing at the given filepath on local disk.
+
+  Args:
+    filepath: string; path to a file on local disk (may be absolute or relative,
+        and the file does not need to exist)
+
+  Returns:
+    A file:/// URL pointing at the file. Regardless of whether filepath was
+    specified as a relative or absolute path, the URL will contain an
+    absolute path to the file.
+
+  Raises:
+    An Exception, if filepath is already a URL.
+  """
+  if urlparse.urlparse(filepath).scheme:
+    raise Exception('"%s" is already a URL' % filepath)
+  return urlparse.urljoin(
+      'file:', urllib.pathname2url(os.path.abspath(filepath)))
+
+
+def copy_contents(source_url, dest_path, create_subdirs_if_needed=False):
+  """ Copies the full contents of the URL 'source_url' into
+  filepath 'dest_path'.
+
+  Args:
+    source_url: string; complete URL to read from
+    dest_path: string; complete filepath to write to (may be absolute or
+        relative)
+    create_subdirs_if_needed: boolean; whether to create subdirectories as
+        needed to create dest_path
+
+  Raises:
+    Some subclass of Exception if unable to read source_url or write dest_path.
+  """
+  if create_subdirs_if_needed:
+    dest_dir = os.path.dirname(dest_path)
+    if not os.path.exists(dest_dir):
+      os.makedirs(dest_dir)
+  with contextlib.closing(urllib.urlopen(source_url)) as source_handle:
+    with open(dest_path, 'wb') as dest_handle:
+      shutil.copyfileobj(fsrc=source_handle, fdst=dest_handle)
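A usage sketch for these helpers (paths are illustrative):

    import url_utils

    # Turn a local path (absolute or relative) into a file:/// URL.
    source_url = url_utils.create_filepath_url('images/foo.png')

    # copy_contents() will open any URL that urllib can, so it works equally
    # for the file:// URL above and for the http:// URLs that
    # gs_utils.download_file() builds.
    url_utils.copy_contents(source_url=source_url,
                            dest_path='/tmp/images/foo.png',
                            create_subdirs_if_needed=True)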

tools/pyutils/url_utils_test.py (new executable file)
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+
+"""
+Copyright 2014 Google Inc.
+
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+Test url_utils.py
+"""
+
+# System-level imports
+import os
+import shutil
+import tempfile
+import unittest
+import urllib
+
+# Imports from within Skia
+import url_utils
+
+
+class UrlUtilsTest(unittest.TestCase):
+
+  def test_create_filepath_url(self):
+    """Tests create_filepath_url(). """
+    with self.assertRaises(Exception):
+      url_utils.create_filepath_url('http://1.2.3.4/path')
+    # Pass absolute filepath.
+    self.assertEquals(
+        url_utils.create_filepath_url(
+            '%sdir%sfile' % (os.path.sep, os.path.sep)),
+        'file:///dir/file')
+    # Pass relative filepath.
+    self.assertEquals(
+        url_utils.create_filepath_url(os.path.join('dir', 'file')),
+        'file://%s/dir/file' % urllib.pathname2url(os.getcwd()))
+
+  def test_copy_contents(self):
+    """Tests copy_contents(). """
+    contents = 'these are the contents'
+    tempdir_path = tempfile.mkdtemp()
+    try:
+      source_path = os.path.join(tempdir_path, 'source')
+      source_url = url_utils.create_filepath_url(source_path)
+      with open(source_path, 'w') as source_handle:
+        source_handle.write(contents)
+      dest_path = os.path.join(tempdir_path, 'new_subdir', 'dest')
+      # Destination subdir does not exist, so copy_contents() should fail
+      # if create_subdirs_if_needed is False.
+      with self.assertRaises(Exception):
+        url_utils.copy_contents(source_url=source_url,
+                                dest_path=dest_path,
+                                create_subdirs_if_needed=False)
+      # If create_subdirs_if_needed is True, it should work.
+      url_utils.copy_contents(source_url=source_url,
+                              dest_path=dest_path,
+                              create_subdirs_if_needed=True)
+      self.assertEquals(open(dest_path).read(), contents)
+    finally:
+      shutil.rmtree(tempdir_path)

tools/test_all.py (new executable file)
@@ -0,0 +1,32 @@
+#!/usr/bin/python
+
+"""
+Copyright 2014 Google Inc.
+
+Use of this source code is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+Run all unittests within this directory tree, recursing into subdirectories.
+"""
+
+import os
+import unittest
+
+from tests import skimage_self_test
+
+
+def main():
+  # First, run any tests that cannot be automatically discovered (because
+  # they don't use Python's unittest framework).
+  skimage_self_test.main()
+
+  # Now discover/run all tests that use Python's unittest framework.
+  suite = unittest.TestLoader().discover(os.path.dirname(__file__),
+                                         pattern='*_test.py')
+  results = unittest.TextTestRunner(verbosity=2).run(suite)
+  print repr(results)
+  if not results.wasSuccessful():
+    raise Exception('failed one or more unittests')
+
+if __name__ == '__main__':
+  main()

tools/tests/__init__.py (new empty file)

tools/tests/run.sh
@@ -201,7 +201,7 @@ jsondiff_test "$JSONDIFF_INPUT/old.json $JSONDIFF_INPUT/new.json" "$JSONDIFF_OUT
 # ('make tools/tests/run.sh work cross-platform')
 #

-COMMAND="python tools/tests/run_all.py"
+COMMAND="python tools/test_all.py"
 echo "$COMMAND"
 $COMMAND
 ret=$?

tools/tests/run_all.py (deleted)
@@ -1,22 +0,0 @@
-#!/usr/bin/python
-
-"""
-Copyright 2013 Google Inc.
-
-Use of this source code is governed by a BSD-style license that can be
-found in the LICENSE file.
-
-Run all self-tests that were written in Python, raising an exception if any
-of them fail.
-"""
-
-import render_pictures_test
-import skimage_self_test
-
-def main():
-  """Run all self-tests, raising an exception if any of them fail."""
-  render_pictures_test.main()
-  skimage_self_test.main()
-
-if __name__ == '__main__':
-  main()