2014-10-17 14:37:05 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
|
|
|
# Use of this source code is governed by a BSD-style license that can be
|
|
|
|
# found in the LICENSE file.
|
|
|
|
|
|
|
|
"""Archives or replays webpages and creates SKPs in a Google Storage location.
|
|
|
|
|
|
|
|
To archive webpages and store SKP files (archives should be rarely updated):
|
|
|
|
|
2014-12-03 13:51:24 +00:00
|
|
|
cd skia
|
2015-03-31 07:18:26 +00:00
|
|
|
python tools/skp/webpages_playback.py --data_store=gs://rmistry --record \
|
2014-10-17 14:37:05 +00:00
|
|
|
--page_sets=all --skia_tools=/home/default/trunk/out/Debug/ \
|
|
|
|
--browser_executable=/tmp/chromium/out/Release/chrome
|
|
|
|
|
2015-03-31 07:18:26 +00:00
|
|
|
The above command uses Google Storage bucket 'rmistry' to download needed files.
|
2014-10-17 14:37:05 +00:00
|
|
|
|
|
|
|
To replay archived webpages and re-generate SKP files (should be run whenever
|
|
|
|
SkPicture.PICTURE_VERSION changes):
|
|
|
|
|
2014-12-03 13:51:24 +00:00
|
|
|
cd skia
|
2015-03-31 07:18:26 +00:00
|
|
|
python tools/skp/webpages_playback.py --data_store=gs://rmistry \
|
2014-10-17 14:37:05 +00:00
|
|
|
--page_sets=all --skia_tools=/home/default/trunk/out/Debug/ \
|
|
|
|
--browser_executable=/tmp/chromium/out/Release/chrome
|
|
|
|
|
|
|
|
|
|
|
|
Specify the --page_sets flag (default value is 'all') to pick a list of which
|
|
|
|
webpages should be archived and/or replayed. Eg:
|
|
|
|
|
2014-12-03 13:51:24 +00:00
|
|
|
--page_sets=tools/skp/page_sets/skia_yahooanswers_desktop.py,\
|
|
|
|
tools/skp/page_sets/skia_googlecalendar_nexus10.py
|
2014-10-17 14:37:05 +00:00
|
|
|
|
|
|
|
The --browser_executable flag should point to the browser binary you want to use
|
|
|
|
to capture archives and/or capture SKP files. Majority of the time it should be
|
|
|
|
a newly built chrome binary.
|
|
|
|
|
2017-12-13 15:39:20 +00:00
|
|
|
The --data_store flag controls where the needed artifacts are downloaded from.
|
|
|
|
It also controls where the generated artifacts, such as recorded webpages and
|
|
|
|
resulting skp renderings, are uploaded to. URLs with scheme 'gs://' use Google
|
|
|
|
Storage. Otherwise use local filesystem.
|
2015-03-31 07:18:26 +00:00
|
|
|
|
|
|
|
The --upload=True flag means generated artifacts will be
|
|
|
|
uploaded or copied to the location specified by --data_store. (default value is
|
|
|
|
False if not specified).
|
2014-10-17 14:37:05 +00:00
|
|
|
|
|
|
|
The --non-interactive flag controls whether the script will prompt the user
|
|
|
|
(default value is False if not specified).
|
|
|
|
|
|
|
|
The --skia_tools flag if specified will allow this script to run
|
|
|
|
debugger, render_pictures, and render_pdfs on the captured
|
|
|
|
SKP(s). The tools are run after all SKPs are successfully captured to make sure
|
|
|
|
they can be added to the buildbots with no breakages.
|
|
|
|
"""
|
|
|
|
|
2021-03-25 13:04:43 +00:00
|
|
|
|
|
|
|
from __future__ import print_function
|
2019-02-06 17:33:44 +00:00
|
|
|
import datetime
|
2014-10-17 14:37:05 +00:00
|
|
|
import glob
|
|
|
|
import optparse
|
|
|
|
import os
|
|
|
|
import posixpath
|
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import tempfile
|
|
|
|
import time
|
|
|
|
import traceback
|
|
|
|
|
|
|
|
|
|
|
|
# Name of the root directory (in the data store and locally) under which all
# playback artifacts are stored.
ROOT_PLAYBACK_DIR_NAME = 'playback'
# Name of the subdirectory holding the generated .skp files.
SKPICTURES_DIR_NAME = 'skps'

# URL scheme that selects the Google Storage backend for --data_store.
GS_PREFIX = 'gs://'

# Google Storage bucket used by --upload_to_partner_bucket.
PARTNERS_GS_BUCKET = 'gs://chrome-partner-telemetry'

# Local archive and SKP directories.
LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR = os.path.join(
    os.path.abspath(os.path.dirname(__file__)), 'page_sets', 'data')
# Scratch directory where run_benchmark writes SKPs before they are renamed.
TMP_SKP_DIR = tempfile.mkdtemp()

# Location of the credentials.json file and the string that represents missing
# passwords.
CREDENTIALS_FILE_PATH = os.path.join(
    os.path.abspath(os.path.dirname(__file__)), 'page_sets', 'data',
    'credentials.json'
)

# Name of the SKP benchmark
SKP_BENCHMARK = 'skpicture_printer'

# The max base name length of Skp files.
MAX_SKP_BASE_NAME_LEN = 31

# Dictionary of device to platform prefixes for SKP files.
DEVICE_TO_PLATFORM_PREFIX = {
    'desktop': 'desk',
    'mobile': 'mobi',
    'tablet': 'tabl'
}

# How many times the record_wpr binary should be retried.
RETRY_RECORD_WPR_COUNT = 5
# How many times the run_benchmark binary should be retried.
RETRY_RUN_MEASUREMENT_COUNT = 3

# Location of the credentials.json file in Google Storage.
CREDENTIALS_GS_PATH = 'playback/credentials/credentials.json'

# X display used when launching the browser / telemetry binaries.
X11_DISPLAY = os.getenv('DISPLAY', ':0')

# Path to Chromium's page sets.
CHROMIUM_PAGE_SETS_PATH = os.path.join('tools', 'perf', 'page_sets')

# Dictionary of supported Chromium page sets to their file prefixes.
# (Currently empty: no Chromium page sets are enabled.)
CHROMIUM_PAGE_SETS_TO_PREFIX = {
}

# Per-page-set regexes (quoted for the shell) passed to run_benchmark's
# --story-filter-exclude flag to skip known-bad stories.
PAGE_SETS_TO_EXCLUSIONS = {
    # See skbug.com/7348
    'key_mobile_sites_smooth.py': '"(digg|worldjournal|twitter|espn)"',
    # See skbug.com/7421
    'top_25_smooth.py': '"(mail\.google\.com)"',
}
|
|
|
|
|
2015-03-31 12:04:29 +00:00
|
|
|
|
2019-03-26 15:11:45 +00:00
|
|
|
class InvalidSKPException(Exception):
  """Raised when a captured SKP is invalid (e.g. suspiciously small)."""
|
|
|
|
|
|
|
|
|
2014-12-03 12:38:46 +00:00
|
|
|
def remove_prefix(s, prefix):
  """Return s with prefix stripped from the front when present, else s."""
  if not s.startswith(prefix):
    return s
  return s[len(prefix):]
|
2014-10-17 14:37:05 +00:00
|
|
|
|
2015-03-31 12:04:29 +00:00
|
|
|
|
2014-10-17 14:37:05 +00:00
|
|
|
class SkPicturePlayback(object):
  """Class that archives or replays webpages and creates SKPs."""

  def __init__(self, parse_options):
    """Constructs a SkPicturePlayback BuildStep instance.

    Args:
      parse_options: optparse Values from the command line (see the option
          definitions at the bottom of this file).
    """
    assert parse_options.browser_executable, 'Must specify --browser_executable'
    self._browser_executable = parse_options.browser_executable
    # The sandbox must be disabled for record/replay; extra args are appended.
    self._browser_args = '--disable-setuid-sandbox'
    if parse_options.browser_extra_args:
      self._browser_args = '%s %s' % (
          self._browser_args, parse_options.browser_extra_args)

    self._chrome_page_sets_path = os.path.join(parse_options.chrome_src_path,
                                               CHROMIUM_PAGE_SETS_PATH)
    # Must be computed before _ParsePageSets, which reads it.
    self._all_page_sets_specified = parse_options.page_sets == 'all'
    self._page_sets = self._ParsePageSets(parse_options.page_sets)

    self._record = parse_options.record
    self._skia_tools = parse_options.skia_tools
    self._non_interactive = parse_options.non_interactive
    self._upload = parse_options.upload
    self._skp_prefix = parse_options.skp_prefix
    # 'gs://...' selects the Google Storage backend; anything else is treated
    # as a local filesystem directory.
    data_store_location = parse_options.data_store
    if data_store_location.startswith(GS_PREFIX):
      self.gs = GoogleStorageDataStore(data_store_location)
    else:
      self.gs = LocalFileSystemDataStore(data_store_location)
    self._upload_to_partner_bucket = parse_options.upload_to_partner_bucket
    self._alternate_upload_dir = parse_options.alternate_upload_dir
    # Telemetry's record_wpr / run_benchmark live in chrome_src/tools/perf.
    self._telemetry_binaries_dir = os.path.join(parse_options.chrome_src_path,
                                                'tools', 'perf')
    self._catapult_dir = os.path.join(parse_options.chrome_src_path,
                                      'third_party', 'catapult')

    # Local output directories (recreated by _CreateLocalStorageDirs).
    self._local_skp_dir = os.path.join(
        parse_options.output_dir, ROOT_PLAYBACK_DIR_NAME, SKPICTURES_DIR_NAME)
    self._local_record_webpages_archive_dir = os.path.join(
        parse_options.output_dir, ROOT_PLAYBACK_DIR_NAME, 'webpages_archive')

    # List of SKP files generated by this script.
    self._skp_files = []
|
|
|
|
|
|
|
|
def _ParsePageSets(self, page_sets):
|
|
|
|
if not page_sets:
|
|
|
|
raise ValueError('Must specify at least one page_set!')
|
|
|
|
elif self._all_page_sets_specified:
|
|
|
|
# Get everything from the page_sets directory.
|
|
|
|
page_sets_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
|
|
|
|
'page_sets')
|
|
|
|
ps = [os.path.join(page_sets_dir, page_set)
|
|
|
|
for page_set in os.listdir(page_sets_dir)
|
|
|
|
if not os.path.isdir(os.path.join(page_sets_dir, page_set)) and
|
|
|
|
page_set.endswith('.py')]
|
2015-03-31 12:04:29 +00:00
|
|
|
chromium_ps = [
|
|
|
|
os.path.join(self._chrome_page_sets_path, cr_page_set)
|
|
|
|
for cr_page_set in CHROMIUM_PAGE_SETS_TO_PREFIX]
|
|
|
|
ps.extend(chromium_ps)
|
2014-10-17 14:37:05 +00:00
|
|
|
elif '*' in page_sets:
|
|
|
|
# Explode and return the glob.
|
|
|
|
ps = glob.glob(page_sets)
|
|
|
|
else:
|
|
|
|
ps = page_sets.split(',')
|
|
|
|
ps.sort()
|
|
|
|
return ps
|
|
|
|
|
2015-03-31 12:04:29 +00:00
|
|
|
def _IsChromiumPageSet(self, page_set):
|
|
|
|
"""Returns true if the specified page set is a Chromium page set."""
|
|
|
|
return page_set.startswith(self._chrome_page_sets_path)
|
|
|
|
|
2014-10-17 14:37:05 +00:00
|
|
|
  def Run(self):
    """Run the SkPicturePlayback BuildStep.

    For each page set: optionally record a fresh WPR archive (or download an
    existing one), run the skpicture_printer benchmark to capture SKPs, then
    rename/move them into the local SKP directory.  Afterwards optionally
    validates with Skia tools and uploads results.  Returns 0 on success.
    """

    # Download the credentials file if it was not previously downloaded.
    if not os.path.isfile(CREDENTIALS_FILE_PATH):
      # Download the credentials.json file from Google Storage.
      self.gs.download_file(CREDENTIALS_GS_PATH, CREDENTIALS_FILE_PATH)

      # If the download did not produce the file, the store has no
      # credentials; fail with instructions for creating one.
      if not os.path.isfile(CREDENTIALS_FILE_PATH):
        raise Exception("""Could not locate credentials file in the storage.
      Please create a credentials file in gs://%s that contains:
      {
        "google": {
          "username": "google_testing_account_username",
          "password": "google_testing_account_password"
        }
      }\n\n""" % CREDENTIALS_GS_PATH)

    # Delete any left over data files in the data directory.
    for archive_file in glob.glob(
        os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, 'skia_*')):
      os.remove(archive_file)

    # Create the required local storage directories.
    self._CreateLocalStorageDirs()

    # Start the timer.
    start_time = time.time()

    # Loop through all page_sets.
    for page_set in self._page_sets:
      if os.path.basename(page_set) == '__init__.py':
        continue
      # skia_foo_desktop.py -> skia_foo_desktop / skia_foo_desktop.json
      page_set_basename = os.path.basename(page_set).split('.')[0]
      page_set_json_name = page_set_basename + '.json'
      # Glob matching the WPR archive(s) record_wpr produces for this set.
      wpr_data_file_glob = (
          page_set.split(os.path.sep)[-1].split('.')[0] + '_*.wprgo')
      page_set_dir = os.path.dirname(page_set)

      if self._IsChromiumPageSet(page_set):
        print('Using Chromium\'s captured archives for Chromium\'s page sets.')
      elif self._record:
        # Create an archive of the specified webpages if '--record=True' is
        # specified.
        record_wpr_cmd = (
          'PYTHONPATH=%s:%s:$PYTHONPATH' % (page_set_dir, self._catapult_dir),
          'DISPLAY=%s' % X11_DISPLAY,
          os.path.join(self._telemetry_binaries_dir, 'record_wpr'),
          '--extra-browser-args="%s"' % self._browser_args,
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          '%s_page_set' % page_set_basename,
          '--page-set-base-dir=%s' % page_set_dir
        )
        # record_wpr is flaky; retry a bounded number of times.
        for _ in range(RETRY_RECORD_WPR_COUNT):
          try:
            subprocess.check_call(' '.join(record_wpr_cmd), shell=True)

            # Copy over the created archive into the local webpages archive
            # directory.
            for wpr_data_file in glob.glob(os.path.join(
                LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, wpr_data_file_glob)):
              shutil.copy(
                os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, wpr_data_file),
                self._local_record_webpages_archive_dir)
            shutil.copy(
                os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
                             page_set_json_name),
                self._local_record_webpages_archive_dir)

            # Break out of the retry loop since there were no errors.
            break
          except Exception:
            # There was a failure continue with the loop.
            traceback.print_exc()
        else:
          # If we get here then record_wpr did not succeed and thus did not
          # break out of the loop.
          raise Exception('record_wpr failed for page_set: %s' % page_set)

      else:
        # Get the webpages archive so that it can be replayed.
        self._DownloadWebpagesArchive(wpr_data_file_glob, page_set_json_name)

      run_benchmark_cmd = [
        'PYTHONPATH=%s:%s:$PYTHONPATH' % (page_set_dir, self._catapult_dir),
        'DISPLAY=%s' % X11_DISPLAY,
        # Guard against hangs: kill run_benchmark after 30 minutes.
        'timeout', '1800',
        os.path.join(self._telemetry_binaries_dir, 'run_benchmark'),
        '--extra-browser-args="%s"' % self._browser_args,
        '--browser=exact',
        '--browser-executable=%s' % self._browser_executable,
        SKP_BENCHMARK,
        '--page-set-name=%s' % page_set_basename,
        '--page-set-base-dir=%s' % page_set_dir,
        '--skp-outdir=%s' % TMP_SKP_DIR,
        '--also-run-disabled-tests',
      ]

      # Skip stories that are known to break for this page set.
      exclusions = PAGE_SETS_TO_EXCLUSIONS.get(os.path.basename(page_set))
      if exclusions:
        run_benchmark_cmd.append('--story-filter-exclude=' + exclusions)

      for _ in range(RETRY_RUN_MEASUREMENT_COUNT):
        try:
          print('\n\n=======Capturing SKP of %s=======\n\n' % page_set)
          subprocess.check_call(' '.join(run_benchmark_cmd), shell=True)
        except subprocess.CalledProcessError:
          # There was a failure continue with the loop.
          traceback.print_exc()
          print('\n\n=======Retrying %s=======\n\n' % page_set)
          time.sleep(10)
          continue

        try:
          # Rename generated SKP files into more descriptive names.
          self._RenameSkpFiles(page_set)
        except InvalidSKPException:
          # There was a failure continue with the loop.
          traceback.print_exc()
          print('\n\n=======Retrying %s=======\n\n' % page_set)
          time.sleep(10)
          continue

        # Break out of the retry loop since there were no errors.
        break
      else:
        # If we get here then run_benchmark did not succeed and thus did not
        # break out of the loop.
        raise Exception('run_benchmark failed for page_set: %s' % page_set)

    print('\n\n=======Capturing SKP files took %s seconds=======\n\n' % (
        time.time() - start_time))

    # Optionally validate the captured SKPs with the Skia tools.
    if self._skia_tools:
      render_pictures_cmd = [
          os.path.join(self._skia_tools, 'render_pictures'),
          '-r', self._local_skp_dir
      ]
      render_pdfs_cmd = [
          os.path.join(self._skia_tools, 'render_pdfs'),
          '-r', self._local_skp_dir
      ]

      for tools_cmd in (render_pictures_cmd, render_pdfs_cmd):
        print('\n\n=======Running %s=======' % ' '.join(tools_cmd))
        subprocess.check_call(tools_cmd)

      if not self._non_interactive:
        print('\n\n=======Running debugger=======')
        os.system('%s %s' % (os.path.join(self._skia_tools, 'debugger'),
                             self._local_skp_dir))

    print('\n\n')

    if self._upload:
      print('\n\n=======Uploading to %s=======\n\n' % self.gs.target_type())
      # Copy the directory structure in the root directory into Google Storage.
      dest_dir_name = ROOT_PLAYBACK_DIR_NAME
      if self._alternate_upload_dir:
        dest_dir_name = self._alternate_upload_dir

      self.gs.upload_dir_contents(
          self._local_skp_dir, dest_dir=dest_dir_name)

      print('\n\n=======New SKPs have been uploaded to %s =======\n\n' %
            posixpath.join(self.gs.target_name(), dest_dir_name,
                           SKPICTURES_DIR_NAME))

    else:
      print('\n\n=======Not Uploading to %s=======\n\n' % self.gs.target_type())
      print('Generated resources are available in %s\n\n' % self._local_skp_dir)

    # Independently of --upload, SKPs may also go to the partner bucket,
    # under a date-stamped directory that is wiped first.
    if self._upload_to_partner_bucket:
      print('\n\n=======Uploading to Partner bucket %s =======\n\n' %
            PARTNERS_GS_BUCKET)
      partner_gs = GoogleStorageDataStore(PARTNERS_GS_BUCKET)
      timestamp = datetime.datetime.utcnow().strftime('%Y-%m-%d')
      upload_dir = posixpath.join(SKPICTURES_DIR_NAME, timestamp)
      try:
        partner_gs.delete_path(upload_dir)
      except subprocess.CalledProcessError:
        print('Cannot delete %s because it does not exist yet.' % upload_dir)
      print('Uploading %s to %s' % (self._local_skp_dir, upload_dir))
      partner_gs.upload_dir_contents(self._local_skp_dir, upload_dir)
      print('\n\n=======New SKPs have been uploaded to %s =======\n\n' %
            posixpath.join(partner_gs.target_name(), upload_dir))

    return 0
|
|
|
|
|
2015-03-31 12:04:29 +00:00
|
|
|
def _GetSkiaSkpFileName(self, page_set):
|
|
|
|
"""Returns the SKP file name for Skia page sets."""
|
|
|
|
# /path/to/skia_yahooanswers_desktop.py -> skia_yahooanswers_desktop.py
|
|
|
|
ps_filename = os.path.basename(page_set)
|
|
|
|
# skia_yahooanswers_desktop.py -> skia_yahooanswers_desktop
|
|
|
|
ps_basename, _ = os.path.splitext(ps_filename)
|
|
|
|
# skia_yahooanswers_desktop -> skia, yahooanswers, desktop
|
|
|
|
_, page_name, device = ps_basename.split('_')
|
|
|
|
basename = '%s_%s' % (DEVICE_TO_PLATFORM_PREFIX[device], page_name)
|
|
|
|
return basename[:MAX_SKP_BASE_NAME_LEN] + '.skp'
|
|
|
|
|
|
|
|
def _GetChromiumSkpFileName(self, page_set, site):
|
|
|
|
"""Returns the SKP file name for Chromium page sets."""
|
|
|
|
# /path/to/http___mobile_news_sandbox_pt0 -> http___mobile_news_sandbox_pt0
|
|
|
|
_, webpage = os.path.split(site)
|
|
|
|
# http___mobile_news_sandbox_pt0 -> mobile_news_sandbox_pt0
|
2015-04-03 15:22:51 +00:00
|
|
|
for prefix in ('http___', 'https___', 'www_'):
|
2015-03-31 17:59:15 +00:00
|
|
|
if webpage.startswith(prefix):
|
|
|
|
webpage = webpage[len(prefix):]
|
2015-03-31 12:04:29 +00:00
|
|
|
# /path/to/skia_yahooanswers_desktop.py -> skia_yahooanswers_desktop.py
|
|
|
|
ps_filename = os.path.basename(page_set)
|
|
|
|
# http___mobile_news_sandbox -> pagesetprefix_http___mobile_news_sandbox
|
|
|
|
basename = '%s_%s' % (CHROMIUM_PAGE_SETS_TO_PREFIX[ps_filename], webpage)
|
|
|
|
return basename[:MAX_SKP_BASE_NAME_LEN] + '.skp'
|
|
|
|
|
2014-10-17 14:37:05 +00:00
|
|
|
  def _RenameSkpFiles(self, page_set):
    """Rename generated SKP files into more descriptive names.

    Look into the subdirectory of TMP_SKP_DIR and find the most interesting
    .skp in there to be this page_set's representative .skp.

    Throws InvalidSKPException if the chosen .skp is less than 1KB. This
    typically happens when there is a 404 or a redirect loop. Anything greater
    than 1KB seems to have captured at least some useful information.
    """
    # One subdirectory per captured site.
    subdirs = glob.glob(os.path.join(TMP_SKP_DIR, '*'))
    for site in subdirs:
      # Chromium and Skia page sets use different naming schemes.
      if self._IsChromiumPageSet(page_set):
        filename = self._GetChromiumSkpFileName(page_set, site)
      else:
        filename = self._GetSkiaSkpFileName(page_set)
      filename = filename.lower()

      if self._skp_prefix:
        filename = '%s%s' % (self._skp_prefix, filename)

      # We choose the largest .skp as the most likely to be interesting.
      largest_skp = max(glob.glob(os.path.join(site, '*.skp')),
                        key=lambda path: os.stat(path).st_size)
      dest = os.path.join(self._local_skp_dir, filename)
      print('Moving', largest_skp, 'to', dest)
      shutil.move(largest_skp, dest)
      self._skp_files.append(filename)
      # Drop the remaining (smaller) SKPs for this site.
      shutil.rmtree(site)
      # Reject tiny SKPs: they usually mean a 404 or redirect loop was
      # captured instead of real content.
      skp_size = os.path.getsize(dest)
      if skp_size < 1024:
        raise InvalidSKPException(
            'Size of %s is only %d. Something is wrong.' % (dest, skp_size))
|
|
|
|
|
2014-10-17 14:37:05 +00:00
|
|
|
|
|
|
|
def _CreateLocalStorageDirs(self):
|
|
|
|
"""Creates required local storage directories for this script."""
|
|
|
|
for d in (self._local_record_webpages_archive_dir,
|
|
|
|
self._local_skp_dir):
|
|
|
|
if os.path.exists(d):
|
|
|
|
shutil.rmtree(d)
|
|
|
|
os.makedirs(d)
|
|
|
|
|
2014-10-27 13:42:11 +00:00
|
|
|
def _DownloadWebpagesArchive(self, wpr_data_file, page_set_json_name):
|
2014-10-17 14:37:05 +00:00
|
|
|
"""Downloads the webpages archive and its required page set from GS."""
|
|
|
|
wpr_source = posixpath.join(ROOT_PLAYBACK_DIR_NAME, 'webpages_archive',
|
|
|
|
wpr_data_file)
|
|
|
|
page_set_source = posixpath.join(ROOT_PLAYBACK_DIR_NAME,
|
|
|
|
'webpages_archive',
|
2014-10-27 13:42:11 +00:00
|
|
|
page_set_json_name)
|
2015-03-31 07:18:26 +00:00
|
|
|
gs = self.gs
|
|
|
|
if (gs.does_storage_object_exist(wpr_source) and
|
|
|
|
gs.does_storage_object_exist(page_set_source)):
|
2019-03-22 17:28:39 +00:00
|
|
|
gs.download_file(wpr_source, LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR)
|
2015-03-31 07:18:26 +00:00
|
|
|
gs.download_file(page_set_source,
|
2014-10-17 14:37:05 +00:00
|
|
|
os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
|
2014-10-27 13:42:11 +00:00
|
|
|
page_set_json_name))
|
2014-10-17 14:37:05 +00:00
|
|
|
else:
|
2015-03-31 07:18:26 +00:00
|
|
|
raise Exception('%s and %s do not exist in %s!' % (gs.target_type(),
|
|
|
|
wpr_source, page_set_source))
|
|
|
|
|
|
|
|
class DataStore:
  """An abstract base class for uploading recordings to a data storage.

  The interface emulates the google storage api."""

  def target_name(self):
    """Returns the name (root URL or directory) of this data store."""
    raise NotImplementedError()

  def target_type(self):
    """Returns a human-readable description of the store's backend."""
    raise NotImplementedError()

  def does_storage_object_exist(self, name):
    """Returns True iff an object called `name` exists in the store."""
    raise NotImplementedError()

  def download_file(self, name, local_path):
    """Copies the object `name` to `local_path` (a file or a directory)."""
    raise NotImplementedError()

  def upload_dir_contents(self, source_dir, dest_dir):
    """Recursively uploads `source_dir` under `dest_dir` in the store."""
    raise NotImplementedError()


class GoogleStorageDataStore(DataStore):
  """DataStore backed by a Google Storage bucket, driven via `gsutil`."""

  def __init__(self, data_store_url):
    # Normalize: strip any trailing '/' so '/'.join() builds clean URLs.
    self._url = data_store_url.rstrip('/')

  def target_name(self):
    return self._url

  def target_type(self):
    return 'Google Storage'

  def does_storage_object_exist(self, name):
    try:
      output = subprocess.check_output([
          'gsutil', 'ls', '/'.join((self._url, name))])
    except subprocess.CalledProcessError:
      return False
    # `gsutil ls` may match several objects (e.g. for a wildcard); callers
    # expect a unique object, so anything other than one line is a miss.
    if len(output.splitlines()) != 1:
      return False
    return True

  def delete_path(self, path):
    """Recursively deletes `path` (relative to the bucket root)."""
    subprocess.check_call(['gsutil', 'rm', '-r', '/'.join((self._url, path))])

  def download_file(self, name, local_path):
    subprocess.check_call([
        'gsutil', 'cp', '/'.join((self._url, name)), local_path])

  def upload_dir_contents(self, source_dir, dest_dir):
    subprocess.check_call([
        'gsutil', 'cp', '-r', source_dir, '/'.join((self._url, dest_dir))])


class LocalFileSystemDataStore(DataStore):
  """DataStore backed by a plain directory on the local filesystem."""

  def __init__(self, data_store_location):
    self._base_dir = data_store_location

  def target_name(self):
    return self._base_dir

  def target_type(self):
    return self._base_dir

  def does_storage_object_exist(self, name):
    return os.path.isfile(os.path.join(self._base_dir, name))

  def delete_path(self, path):
    # Bug fix: `path` is relative to the store (mirroring
    # GoogleStorageDataStore.delete_path); the original removed a path
    # relative to the current working directory instead.
    shutil.rmtree(os.path.join(self._base_dir, path))

  def download_file(self, name, local_path):
    # Bug fix: use shutil.copy rather than copyfile because callers (see
    # _DownloadWebpagesArchive) may pass a destination *directory*;
    # copyfile raises when local_path is a directory, copy handles both.
    shutil.copy(os.path.join(self._base_dir, name), local_path)

  def upload_dir_contents(self, source_dir, dest_dir):
    def copytree(source_dir, dest_dir):
      # Recursive merge-copy; unlike shutil.copytree it tolerates
      # pre-existing destination directories.
      if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
      for item in os.listdir(source_dir):
        source = os.path.join(source_dir, item)
        dest = os.path.join(dest_dir, item)
        if os.path.isdir(source):
          copytree(source, dest)
        else:
          shutil.copy2(source, dest)
    copytree(source_dir, os.path.join(self._base_dir, dest_dir))
|
2014-10-17 14:37:05 +00:00
|
|
|
|
|
|
|
if '__main__' == __name__:
  # Command-line entry point: parse flags (documented in the module
  # docstring) and run the SkPicturePlayback build step.
  option_parser = optparse.OptionParser()
  option_parser.add_option(
      '', '--page_sets',
      help='Specifies the page sets to use to archive. Supports globs.',
      default='all')
  option_parser.add_option(
      '', '--record', action='store_true',
      help='Specifies whether a new website archive should be created.',
      default=False)
  option_parser.add_option(
      '', '--skia_tools',
      help=('Path to compiled Skia executable tools. '
            'render_pictures/render_pdfs is run on the set '
            'after all SKPs are captured. If the script is run without '
            '--non-interactive then the debugger is also run at the end. Debug '
            'builds are recommended because they seem to catch more failures '
            'than Release builds.'),
      default=None)
  option_parser.add_option(
      '', '--upload', action='store_true',
      help=('Uploads to Google Storage or copies to local filesystem storage '
            ' if this is True.'),
      default=False)
  option_parser.add_option(
      '', '--upload_to_partner_bucket', action='store_true',
      help=('Uploads SKPs to the chrome-partner-telemetry Google Storage '
            'bucket if true.'),
      default=False)
  option_parser.add_option(
      '', '--data_store',
      help=('The location of the file storage to use to download and upload '
            'files. Can be \'gs://<bucket>\' for Google Storage, or '
            'a directory for local filesystem storage'),
      default='gs://skia-skps')
  option_parser.add_option(
      '', '--alternate_upload_dir',
      help= ('Uploads to a different directory in Google Storage or local '
             'storage if this flag is specified'),
      default=None)
  option_parser.add_option(
      '', '--output_dir',
      help=('Temporary directory where SKPs and webpage archives will be '
            'outputted to.'),
      default=tempfile.gettempdir())
  option_parser.add_option(
      '', '--browser_executable',
      help='The exact browser executable to run.',
      default=None)
  option_parser.add_option(
      '', '--browser_extra_args',
      help='Additional arguments to pass to the browser.',
      default=None)
  option_parser.add_option(
      '', '--chrome_src_path',
      help='Path to the chromium src directory.',
      default=None)
  option_parser.add_option(
      '', '--non-interactive', action='store_true',
      help='Runs the script without any prompts. If this flag is specified and '
           '--skia_tools is specified then the debugger is not run.',
      default=False)
  option_parser.add_option(
      '', '--skp_prefix',
      help='Prefix to add to the names of generated SKPs.',
      default=None)
  options, unused_args = option_parser.parse_args()

  # Exit with Run()'s return code (0 on success).
  playback = SkPicturePlayback(options)
  sys.exit(playback.Run())
|