'''
Downloads the actual gm results most recently generated by the Skia buildbots,
and adds any new ones to SVN control.

This tool makes it much easier to check in new baselines, via the following
steps:

cd .../trunk
svn update
# make sure there are no files awaiting svn commit
python tools/download-baselines.py gm/base-macmini-lion-fixed # or other gm/ subdir
# upload CL for review
# validate that the new images look right
# commit CL

Launch with --help to see more options.

Copyright 2011 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''

# common Python modules
import optparse
import os
import re
import sys
import urllib2

# modules declared within this same directory
import svn

# Where to download recently generated baseline images for each baseline type.
#
# For now this only works for our Mac buildbots; our other buildbots aren't
# uploading their results to a web server yet.
#
# Note also that these will currently work only within the Google corporate
# network; that will also change soon.
ACTUALS_BY_BASELINE_SUBDIR = {
    'gm/base-macmini':
        'http://172.29.92.185/b/build/slave/Skia_Mac_Float_NoDebug/gm/actual',
    'gm/base-macmini-fixed':
        'http://172.29.92.185/b/build/slave/Skia_Mac_Fixed_NoDebug/gm/actual',
    'gm/base-macmini-lion-fixed':
        'http://172.29.92.179/b/build/slave/Skia_MacMiniLion_Fixed_NoDebug/gm/actual',
    'gm/base-macmini-lion-float':
        'http://172.29.92.179/b/build/slave/Skia_MacMiniLion_Float_NoDebug/gm/actual',
}
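
# To cover an additional platform once its buildbot starts uploading results,
# a new entry of the same form would be added above, e.g. (hypothetical host
# and builder names, shown for illustration only):
#
#   'gm/base-linux':
#       'http://<buildbot-host>/b/build/slave/Skia_Linux_Float_NoDebug/gm/actual',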

USAGE_STRING = 'usage: %s [options] <baseline_subdir>'
OPTION_IGNORE_LOCAL_MODS = '--ignore-local-mods'
OPTION_ADD_NEW_FILES = '--add-new-files'

IMAGE_REGEX = '.+\.png'
IMAGE_MIMETYPE = 'image/png'
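
# IMAGE_REGEX is interpolated into the '<a href="...">' pattern in
# DownloadMatchingFiles() below, so with the value above a directory-listing
# link such as <a href="aarects_565.png"> would be downloaded, while a link
# such as <a href="index.html"> would be skipped.  (These filenames are
# illustrative only, not actual gm test names.)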


def GetPlatformUrl(baseline_subdir):
    """Return the URL under which the buildbots store generated baseline
    images (results generated at multiple svn revisions).

    Raises KeyError if we don't have a URL matching this baseline_subdir.

    @param baseline_subdir indicates which platform we want images for
    """
    try:
        return ACTUALS_BY_BASELINE_SUBDIR[baseline_subdir]
    except KeyError:
        raise KeyError(
            'unknown baseline_subdir "%s", try one of these instead: %s' % (
                baseline_subdir, ACTUALS_BY_BASELINE_SUBDIR.keys()))


def GetLatestResultsUrl(baseline_subdir):
    """Return URL from which we can download the MOST RECENTLY generated
    images for this baseline type.

    @param baseline_subdir indicates which platform we want images for
    """
    base_platform_url = GetPlatformUrl(baseline_subdir)
    print 'base_platform_url is %s' % base_platform_url

    # Find the most recently generated baseline images within base_platform_url
    response = urllib2.urlopen(base_platform_url)
    html = response.read()
    link_regex = re.compile('<a href="(.*)">')
    links = link_regex.findall(html)
    last_link = links[-1]
    most_recent_result_url = '%s/%s' % (base_platform_url, last_link)
    print 'most_recent_result_url is %s' % most_recent_result_url
    return most_recent_result_url
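
# Note: GetLatestResultsUrl() above assumes the buildbot serves a plain
# directory listing whose '<a href="...">' entries appear in ascending order,
# so that the last link names the most recently generated results.  A sketch
# of that assumption (the listing HTML below is illustrative, not an actual
# buildbot response):
#
#   <a href="2841/">2841/</a>
#   <a href="2853/">2853/</a>    <- links[-1]; results under .../2853 are used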


def DownloadMatchingFiles(source_url, filename_regex, dest_dir,
                          only_download_updates=False):
    """Download all files from source_url that match filename_regex, and save
    them (with their original filenames) in dest_dir.

    @param source_url URL of the directory whose listing we scan for files
    @param filename_regex only download files that match this regex
    @param dest_dir where to save the downloaded files
    @param only_download_updates if True, only download files that are already
           present in dest_dir (download updated versions of those files)
    """
    while source_url.endswith('/'):
        source_url = source_url[:-1]
    response = urllib2.urlopen(source_url)
    html = response.read()
    link_regex = re.compile('<a href="(%s)">' % filename_regex)
    links = link_regex.findall(html)
    for link in links:
        dest_path = os.path.join(dest_dir, link)
        if only_download_updates and not os.path.isfile(dest_path):
            continue
        DownloadBinaryFile('%s/%s' % (source_url, link), dest_path)
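
# Example (hypothetical argument values, shown for illustration only):
#
#   DownloadMatchingFiles(
#       source_url='http://172.29.92.179/b/build/slave/.../gm/actual/2853',
#       filename_regex=IMAGE_REGEX,
#       dest_dir='gm/base-macmini-lion-fixed',
#       only_download_updates=True)
#
# would re-download only those .png files that already exist in
# gm/base-macmini-lion-fixed, leaving any brand-new baselines for a run with
# the --add-new-files option.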


def DownloadBinaryFile(source_url, dest_path):
    """Download a single file from its source_url and save it to local disk
    at dest_path.

    @param source_url URL to download the file from
    @param dest_path local path at which to save the downloaded file
    """
    print 'DownloadBinaryFile: %s -> %s' % (source_url, dest_path)
    url_fh = urllib2.urlopen(source_url)
    local_fh = open(dest_path, 'wb')
    local_fh.write(url_fh.read())
    local_fh.close()


def Main(options, args):
    """Download most recently generated baseline images for a given platform,
    and add any new ones to SVN control.

    @param options command-line options, as parsed by optparse
    @param args positional command-line arguments (just baseline_subdir)
    """
    num_args = len(args)
    if num_args != 1:
        RaiseUsageException()

    baseline_subdir = args[0]
    while baseline_subdir.endswith('/'):
        baseline_subdir = baseline_subdir[:-1]
    svn_handler = svn.Svn(baseline_subdir)

    # If there are any locally modified files in that directory, exit
    # (so that we don't risk overwriting the user's previous work).
    new_and_modified_files = svn_handler.GetNewAndModifiedFiles()
    if not options.ignore_local_mods:
        if new_and_modified_files:
            raise Exception('Exiting because there are already new and/or '
                            'modified files in %s. To continue in spite of '
                            'that, run with the %s option.' % (
                                baseline_subdir, OPTION_IGNORE_LOCAL_MODS))

    # Download the actual results from the appropriate buildbot.
    results_url = GetLatestResultsUrl(baseline_subdir)
    DownloadMatchingFiles(source_url=results_url, filename_regex=IMAGE_REGEX,
                          dest_dir=baseline_subdir,
                          only_download_updates=(not options.add_new_files))

    # Add any new files to SVN control (if we are running with add_new_files).
    new_files = svn_handler.GetNewFiles()
    if new_files and options.add_new_files:
        svn_handler.AddFiles(new_files)
        svn_handler.SetProperty(new_files, svn.PROPERTY_MIMETYPE,
                                IMAGE_MIMETYPE)
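
# For reference, Main() relies on the local svn module (imported above)
# providing roughly this interface (inferred from the calls made here; not a
# definitive description of that module):
#
#   svn.PROPERTY_MIMETYPE                 # name of the svn mime-type property
#   handler = svn.Svn(directory)
#   handler.GetNewAndModifiedFiles()      # -> files that are new and/or locally modified
#   handler.GetNewFiles()                 # -> files not yet under SVN control
#   handler.AddFiles(filenames)           # schedule files for addition
#   handler.SetProperty(filenames, property_name, property_value)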


def RaiseUsageException():
    raise Exception(USAGE_STRING % __file__)


if __name__ == '__main__':
    parser = optparse.OptionParser(USAGE_STRING % '%prog')
    parser.add_option(OPTION_IGNORE_LOCAL_MODS,
                      action='store_true', default=False,
                      help='allow tool to run even if there are already '
                           'local modifications in the baseline_subdir')
    parser.add_option(OPTION_ADD_NEW_FILES,
                      action='store_true', default=False,
                      help='in addition to downloading new versions of '
                           'existing baselines, also download baselines that are '
                           'not under SVN control yet')
    (options, args) = parser.parse_args()
    Main(options, args)
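
# Example invocations (the first is taken from the module docstring; the
# gm/ subdirectory shown is illustrative):
#
#   python tools/download-baselines.py gm/base-macmini-lion-fixed
#   python tools/download-baselines.py --add-new-files gm/base-macmini-lion-fixed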