[Recipes] Move test and perf steps into test and perf recipes
This looks like another big change, but I really just moved blocks of code around. Again, no expectations diffs because no behavior is changed.

BUG=skia:5578
GOLD_TRYBOT_URL= https://gold.skia.org/search?issue=2198973002
Review-Url: https://codereview.chromium.org/2198973002

Parent: bffc256687
Commit: bc20a701d8
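
The net effect on the recipe entry points, distilled from the hunks below (a sketch only, with the names exactly as they appear in the diff):

# Before: the core module owned the test/perf logic.
def RunSteps(api):
  api.core.setup()
  api.core.test_steps()      # or api.core.perf_steps()
  api.core.cleanup_steps()
  api.run.check_failure()

# After: test.py and perf.py define their own test_steps()/perf_steps()
# and talk to the flavor module directly.
def RunSteps(api):
  api.core.setup()
  api.flavor.install()
  test_steps(api)            # or perf_steps(api)
  api.flavor.cleanup_steps()
  api.run.check_failure()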
@@ -17,15 +17,6 @@ from recipe_engine import config_types
 from . import fake_specs
 
 
-TEST_EXPECTED_SKP_VERSION = '42'
-TEST_EXPECTED_SK_IMAGE_VERSION = '42'
-
-VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
-VERSION_FILE_SKP = 'SKP_VERSION'
-
-VERSION_NONE = -1
-
-
 class SkiaApi(recipe_api.RecipeApi):
 
   def get_builder_spec(self, skia_dir, builder_name):
@@ -176,323 +167,3 @@ class SkiaApi(recipe_api.RecipeApi):
     if self.m.vars.need_chromium_checkout:
       self.m.gclient.runhooks(cwd=self.m.vars.checkout_root,
                               env=self.m.vars.gclient_env)
-
-  def copy_dir(self, host_version, version_file, tmp_dir,
-               host_path, device_path, test_expected_version,
-               test_actual_version):
-    actual_version_file = self.m.path.join(tmp_dir, version_file)
-    # Copy to device.
-    device_version_file = self.m.flavor.device_path_join(
-        self.m.flavor.device_dirs.tmp_dir, version_file)
-    if str(actual_version_file) != str(device_version_file):
-      try:
-        device_version = (
-            self.m.flavor.read_file_on_device(device_version_file))
-      except self.m.step.StepFailure:
-        device_version = VERSION_NONE
-      if device_version != host_version:
-        self.m.flavor.remove_file_on_device(device_version_file)
-        self.m.flavor.create_clean_device_dir(device_path)
-        self.m.flavor.copy_directory_contents_to_device(
-            host_path, device_path)
-
-        # Copy the new version file.
-        self.m.flavor.copy_file_to_device(actual_version_file,
-                                          device_version_file)
-
-  def _copy_images(self):
-    """Download and copy test images if needed."""
-    version_file = self.m.vars.infrabots_dir.join(
-        'assets', 'skimage', 'VERSION')
-    test_data = self.m.properties.get(
-        'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
-    version = self.m.run.readfile(
-        version_file,
-        name='Get downloaded skimage VERSION',
-        test_data=test_data).rstrip()
-    self.m.run.writefile(
-        self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SK_IMAGE),
-        version)
-    self.copy_dir(
-        version,
-        VERSION_FILE_SK_IMAGE,
-        self.m.vars.tmp_dir,
-        self.m.vars.images_dir,
-        self.m.flavor.device_dirs.images_dir,
-        test_expected_version=self.m.properties.get(
-            'test_downloaded_sk_image_version',
-            TEST_EXPECTED_SK_IMAGE_VERSION),
-        test_actual_version=self.m.properties.get(
-            'test_downloaded_sk_image_version',
-            TEST_EXPECTED_SK_IMAGE_VERSION))
-    return version
-
-  def _copy_skps(self):
-    """Download and copy the SKPs if needed."""
-    version_file = self.m.vars.infrabots_dir.join(
-        'assets', 'skp', 'VERSION')
-    test_data = self.m.properties.get(
-        'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
-    version = self.m.run.readfile(
-        version_file,
-        name='Get downloaded SKP VERSION',
-        test_data=test_data).rstrip()
-    self.m.run.writefile(
-        self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SKP),
-        version)
-    self.copy_dir(
-        version,
-        VERSION_FILE_SKP,
-        self.m.vars.tmp_dir,
-        self.m.vars.local_skp_dir,
-        self.m.flavor.device_dirs.skp_dir,
-        test_expected_version=self.m.properties.get(
-            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
-        test_actual_version=self.m.properties.get(
-            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
-    return version
-
-  def install(self):
-    """Copy the required executables and files to the device."""
-    # Run any device-specific installation.
-    self.m.flavor.install()
-
-    # TODO(borenet): Only copy files which have changed.
-    # Resources
-    self.m.flavor.copy_directory_contents_to_device(
-        self.m.vars.resource_dir,
-        self.m.flavor.device_dirs.resource_dir)
-
-  def test_steps(self):
-    """Run the DM test."""
-    self.m.run.run_once(self.install)
-    self.m.run.run_once(self._copy_skps)
-    self.m.run.run_once(self._copy_images)
-
-    use_hash_file = False
-    if self.m.vars.upload_dm_results:
-      # This must run before we write anything into
-      # self.m.flavor.device_dirs.dm_dir or we may end up deleting our
-      # output on machines where they're the same.
-      self.m.flavor.create_clean_host_dir(self.m.vars.dm_dir)
-      host_dm_dir = str(self.m.vars.dm_dir)
-      device_dm_dir = str(self.m.flavor.device_dirs.dm_dir)
-      if host_dm_dir != device_dm_dir:
-        self.m.flavor.create_clean_device_dir(device_dm_dir)
-
-      # Obtain the list of already-generated hashes.
-      hash_filename = 'uninteresting_hashes.txt'
-
-      # Ensure that the tmp_dir exists.
-      self.m.run.run_once(self.m.file.makedirs,
-                          'tmp_dir',
-                          self.m.vars.tmp_dir,
-                          infra_step=True)
-
-      host_hashes_file = self.m.vars.tmp_dir.join(hash_filename)
-      hashes_file = self.m.flavor.device_path_join(
-          self.m.flavor.device_dirs.tmp_dir, hash_filename)
-      self.m.run(
-          self.m.python.inline,
-          'get uninteresting hashes',
-          program="""
-          import contextlib
-          import math
-          import socket
-          import sys
-          import time
-          import urllib2
-
-          HASHES_URL = 'https://gold.skia.org/_/hashes'
-          RETRIES = 5
-          TIMEOUT = 60
-          WAIT_BASE = 15
-
-          socket.setdefaulttimeout(TIMEOUT)
-          for retry in range(RETRIES):
-            try:
-              with contextlib.closing(
-                  urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
-                hashes = w.read()
-                with open(sys.argv[1], 'w') as f:
-                  f.write(hashes)
-                  break
-            except Exception as e:
-              print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
-              print e
-              if retry == RETRIES:
-                raise
-              waittime = WAIT_BASE * math.pow(2, retry)
-              print 'Retry in %d seconds.' % waittime
-              time.sleep(waittime)
-          """,
-          args=[host_hashes_file],
-          cwd=self.m.vars.skia_dir,
-          abort_on_failure=False,
-          fail_build_on_failure=False,
-          infra_step=True)
-
-      if self.m.path.exists(host_hashes_file):
-        self.m.flavor.copy_file_to_device(host_hashes_file, hashes_file)
-        use_hash_file = True
-
-    # Run DM.
-    properties = [
-      'gitHash', self.m.vars.got_revision,
-      'master', self.m.vars.master_name,
-      'builder', self.m.vars.builder_name,
-      'build_number', self.m.vars.build_number,
-    ]
-    if self.m.vars.is_trybot:
-      properties.extend([
-        'issue', self.m.vars.issue,
-        'patchset', self.m.vars.patchset,
-      ])
-
-    args = [
-      'dm',
-      '--undefok',   # This helps branches that may not know new flags.
-      '--resourcePath', self.m.flavor.device_dirs.resource_dir,
-      '--skps', self.m.flavor.device_dirs.skp_dir,
-      '--images', self.m.flavor.device_path_join(
-          self.m.flavor.device_dirs.images_dir, 'dm'),
-      '--colorImages', self.m.flavor.device_path_join(
-          self.m.flavor.device_dirs.images_dir, 'colorspace'),
-      '--nameByHash',
-      '--properties'
-    ] + properties
-
-    args.append('--key')
-    args.extend(self._KeyParams())
-    if use_hash_file:
-      args.extend(['--uninterestingHashesFile', hashes_file])
-    if self.m.vars.upload_dm_results:
-      args.extend(['--writePath', self.m.flavor.device_dirs.dm_dir])
-
-    skip_flag = None
-    if self.m.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
-      skip_flag = '--nogpu'
-    elif self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
-      skip_flag = '--nocpu'
-    if skip_flag:
-      args.append(skip_flag)
-    args.extend(self.m.vars.dm_flags)
-
-    self.m.run(self.m.flavor.step, 'dm', cmd=args,
-               abort_on_failure=False,
-               env=self.m.vars.default_env)
-
-    if self.m.vars.upload_dm_results:
-      # Copy images and JSON to host machine if needed.
-      self.m.flavor.copy_directory_contents_to_host(
-          self.m.flavor.device_dirs.dm_dir, self.m.vars.dm_dir)
-
-    # See skia:2789.
-    if ('Valgrind' in self.m.vars.builder_name and
-        self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
-      abandonGpuContext = list(args)
-      abandonGpuContext.append('--abandonGpuContext')
-      self.m.run(self.m.flavor.step, 'dm --abandonGpuContext',
-                 cmd=abandonGpuContext, abort_on_failure=False)
-      preAbandonGpuContext = list(args)
-      preAbandonGpuContext.append('--preAbandonGpuContext')
-      self.m.run(self.m.flavor.step, 'dm --preAbandonGpuContext',
-                 cmd=preAbandonGpuContext, abort_on_failure=False,
-                 env=self.m.vars.default_env)
-
-  def perf_steps(self):
-    """Run Skia benchmarks."""
-    self.m.run.run_once(self.install)
-    self.m.run.run_once(self._copy_skps)
-    self.m.run.run_once(self._copy_images)
-
-    if self.m.vars.upload_perf_results:
-      self.m.flavor.create_clean_device_dir(
-          self.m.flavor.device_dirs.perf_data_dir)
-
-    # Run nanobench.
-    properties = [
-      '--properties',
-      'gitHash', self.m.vars.got_revision,
-      'build_number', self.m.vars.build_number,
-    ]
-    if self.m.vars.is_trybot:
-      properties.extend([
-        'issue', self.m.vars.issue,
-        'patchset', self.m.vars.patchset,
-      ])
-
-    target = 'nanobench'
-    if 'VisualBench' in self.m.vars.builder_name:
-      target = 'visualbench'
-    args = [
-      target,
-      '--undefok',   # This helps branches that may not know new flags.
-      '-i', self.m.flavor.device_dirs.resource_dir,
-      '--skps', self.m.flavor.device_dirs.skp_dir,
-      '--images', self.m.flavor.device_path_join(
-          self.m.flavor.device_dirs.images_dir, 'nanobench'),
-    ]
-
-    skip_flag = None
-    if self.m.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
-      skip_flag = '--nogpu'
-    elif self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
-      skip_flag = '--nocpu'
-    if skip_flag:
-      args.append(skip_flag)
-    args.extend(self.m.vars.nanobench_flags)
-
-    if self.m.vars.upload_perf_results:
-      json_path = self.m.flavor.device_path_join(
-          self.m.flavor.device_dirs.perf_data_dir,
-          'nanobench_%s.json' % self.m.vars.got_revision)
-      args.extend(['--outResultsFile', json_path])
-      args.extend(properties)
-
-      keys_blacklist = ['configuration', 'role', 'is_trybot']
-      args.append('--key')
-      for k in sorted(self.m.vars.builder_cfg.keys()):
-        if not k in keys_blacklist:
-          args.extend([k, self.m.vars.builder_cfg[k]])
-
-    self.m.run(self.m.flavor.step, target, cmd=args,
-               abort_on_failure=False,
-               env=self.m.vars.default_env)
-
-    # See skia:2789.
-    if ('Valgrind' in self.m.vars.builder_name and
-        self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
-      abandonGpuContext = list(args)
-      abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
-      self.m.run(self.m.flavor.step,
-                 '%s --abandonGpuContext' % target,
-                 cmd=abandonGpuContext, abort_on_failure=False,
-                 env=self.m.vars.default_env)
-
-    # Upload results.
-    if self.m.vars.upload_perf_results:
-      self.m.file.makedirs('perf_dir', self.m.vars.perf_data_dir)
-      self.m.flavor.copy_directory_contents_to_host(
-          self.m.flavor.device_dirs.perf_data_dir,
-          self.m.vars.perf_data_dir)
-
-  def cleanup_steps(self):
-    """Run any cleanup steps."""
-    self.m.flavor.cleanup_steps()
-
-  def _KeyParams(self):
-    """Build a unique key from the builder name (as a list).
-
-    E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6
-    """
-    # Don't bother to include role, which is always Test.
-    # TryBots are uploaded elsewhere so they can use the same key.
-    blacklist = ['role', 'is_trybot']
-
-    flat = []
-    for k in sorted(self.m.vars.builder_cfg.keys()):
-      if k not in blacklist:
-        flat.append(k)
-        flat.append(self.m.vars.builder_cfg[k])
-    return flat
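
The removed test_steps()/perf_steps() leaned on self.m.run.run_once() so that install() and the SKP/image copies ran at most once no matter which steps asked for them; after this change each recipe simply calls api.flavor.install() once from RunSteps. A minimal sketch of the run-once pattern itself (plain Python, not the real 'run' recipe module):

_already_ran = set()

def run_once(fn, *args, **kwargs):
  # Execute fn at most once per process; later calls become no-ops.
  if fn not in _already_ran:
    _already_ran.add(fn)
    return fn(*args, **kwargs)

def install():
  print('installing')  # stand-in for the real work

run_once(install)  # runs install()
run_once(install)  # does nothing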
@@ -7,6 +7,7 @@ DEPS = [
   'build/file',
   'recipe_engine/path',
   'recipe_engine/platform',
+  'recipe_engine/properties',
   'recipe_engine/python',
   'recipe_engine/raw_io',
   'recipe_engine/step',
@@ -19,6 +19,15 @@ from . import valgrind_flavor
 from . import xsan_flavor
 
 
+TEST_EXPECTED_SKP_VERSION = '42'
+TEST_EXPECTED_SK_IMAGE_VERSION = '42'
+
+VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
+VERSION_FILE_SKP = 'SKP_VERSION'
+
+VERSION_NONE = -1
+
+
 def is_android(builder_cfg):
   """Determine whether the given builder is an Android builder."""
   return ('Android' in builder_cfg.get('extra_config', '') or
@@ -118,9 +127,90 @@ class SkiaFlavorApi(recipe_api.RecipeApi):
     return self._f.remove_file_on_device(path)
 
   def install(self):
-    rv = self._f.install()
+    self._f.install()
     self.device_dirs = self._f.device_dirs
-    return rv
+
+    # TODO(borenet): Only copy files which have changed.
+    # Resources
+    self.copy_directory_contents_to_device(
+        self.m.vars.resource_dir,
+        self.device_dirs.resource_dir)
+
+    self._copy_skps()
+    self._copy_images()
 
   def cleanup_steps(self):
     return self._f.cleanup_steps()
+
+  def _copy_dir(self, host_version, version_file, tmp_dir,
+                host_path, device_path, test_expected_version,
+                test_actual_version):
+    actual_version_file = self.m.path.join(tmp_dir, version_file)
+    # Copy to device.
+    device_version_file = self.device_path_join(
+        self.device_dirs.tmp_dir, version_file)
+    if str(actual_version_file) != str(device_version_file):
+      try:
+        device_version = self.read_file_on_device(device_version_file)
+      except self.m.step.StepFailure:
+        device_version = VERSION_NONE
+      if device_version != host_version:
+        self.remove_file_on_device(device_version_file)
+        self.create_clean_device_dir(device_path)
+        self.copy_directory_contents_to_device(
+            host_path, device_path)
+
+        # Copy the new version file.
+        self.copy_file_to_device(actual_version_file, device_version_file)
+
+  def _copy_images(self):
+    """Download and copy test images if needed."""
+    version_file = self.m.vars.infrabots_dir.join(
+        'assets', 'skimage', 'VERSION')
+    test_data = self.m.properties.get(
+        'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
+    version = self.m.run.readfile(
+        version_file,
+        name='Get downloaded skimage VERSION',
+        test_data=test_data).rstrip()
+    self.m.run.writefile(
+        self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SK_IMAGE),
+        version)
+    self._copy_dir(
+        version,
+        VERSION_FILE_SK_IMAGE,
+        self.m.vars.tmp_dir,
+        self.m.vars.images_dir,
+        self.device_dirs.images_dir,
+        test_expected_version=self.m.properties.get(
+            'test_downloaded_sk_image_version',
+            TEST_EXPECTED_SK_IMAGE_VERSION),
+        test_actual_version=self.m.properties.get(
+            'test_downloaded_sk_image_version',
+            TEST_EXPECTED_SK_IMAGE_VERSION))
+    return version
+
+  def _copy_skps(self):
+    """Download and copy the SKPs if needed."""
+    version_file = self.m.vars.infrabots_dir.join(
+        'assets', 'skp', 'VERSION')
+    test_data = self.m.properties.get(
+        'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
+    version = self.m.run.readfile(
+        version_file,
+        name='Get downloaded SKP VERSION',
+        test_data=test_data).rstrip()
+    self.m.run.writefile(
+        self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SKP),
+        version)
+    self._copy_dir(
+        version,
+        VERSION_FILE_SKP,
+        self.m.vars.tmp_dir,
+        self.m.vars.local_skp_dir,
+        self.device_dirs.skp_dir,
+        test_expected_version=self.m.properties.get(
+            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
+        test_actual_version=self.m.properties.get(
+            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
+    return version
@@ -70,7 +70,7 @@ for p in psutil.process_iter():
   ''',
       infra_step=True)
 
-  api.core.cleanup_steps()
+  api.flavor.cleanup_steps()
   api.run.check_failure()
 
@@ -7,6 +7,7 @@
 
 
 DEPS = [
+  'build/file',
   'core',
   'recipe_engine/json',
   'recipe_engine/path',
@@ -14,6 +15,8 @@ DEPS = [
   'recipe_engine/properties',
   'recipe_engine/raw_io',
   'run',
+  'flavor',
+  'vars',
 ]
 
 
@@ -31,10 +34,85 @@ TEST_BUILDERS = {
 }
 
 
+def perf_steps(api):
+  """Run Skia benchmarks."""
+  if api.vars.upload_perf_results:
+    api.flavor.create_clean_device_dir(
+        api.flavor.device_dirs.perf_data_dir)
+
+  # Run nanobench.
+  properties = [
+    '--properties',
+    'gitHash', api.vars.got_revision,
+    'build_number', api.vars.build_number,
+  ]
+  if api.vars.is_trybot:
+    properties.extend([
+      'issue', api.vars.issue,
+      'patchset', api.vars.patchset,
+    ])
+
+  target = 'nanobench'
+  if 'VisualBench' in api.vars.builder_name:
+    target = 'visualbench'
+  args = [
+    target,
+    '--undefok',   # This helps branches that may not know new flags.
+    '-i', api.flavor.device_dirs.resource_dir,
+    '--skps', api.flavor.device_dirs.skp_dir,
+    '--images', api.flavor.device_path_join(
+        api.flavor.device_dirs.images_dir, 'nanobench'),
+  ]
+
+  skip_flag = None
+  if api.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
+    skip_flag = '--nogpu'
+  elif api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
+    skip_flag = '--nocpu'
+  if skip_flag:
+    args.append(skip_flag)
+  args.extend(api.vars.nanobench_flags)
+
+  if api.vars.upload_perf_results:
+    json_path = api.flavor.device_path_join(
+        api.flavor.device_dirs.perf_data_dir,
+        'nanobench_%s.json' % api.vars.got_revision)
+    args.extend(['--outResultsFile', json_path])
+    args.extend(properties)
+
+    keys_blacklist = ['configuration', 'role', 'is_trybot']
+    args.append('--key')
+    for k in sorted(api.vars.builder_cfg.keys()):
+      if not k in keys_blacklist:
+        args.extend([k, api.vars.builder_cfg[k]])
+
+  api.run(api.flavor.step, target, cmd=args,
+          abort_on_failure=False,
+          env=api.vars.default_env)
+
+  # See skia:2789.
+  if ('Valgrind' in api.vars.builder_name and
+      api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
+    abandonGpuContext = list(args)
+    abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
+    api.run(api.flavor.step,
+            '%s --abandonGpuContext' % target,
+            cmd=abandonGpuContext, abort_on_failure=False,
+            env=api.vars.default_env)
+
+  # Copy results to swarming out dir.
+  if api.vars.upload_perf_results:
+    api.file.makedirs('perf_dir', api.vars.perf_data_dir)
+    api.flavor.copy_directory_contents_to_host(
+        api.flavor.device_dirs.perf_data_dir,
+        api.vars.perf_data_dir)
+
+
 def RunSteps(api):
   api.core.setup()
-  api.core.perf_steps()
-  api.core.cleanup_steps()
+  api.flavor.install()
+  perf_steps(api)
+  api.flavor.cleanup_steps()
   api.run.check_failure()
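
Both perf_steps() above and test_steps() below derive flags from the parsed builder name. A quick illustration of the skip-flag choice and the --key flattening with a hypothetical builder_cfg (plain Python, using perf_steps' blacklist):

builder_cfg = {  # hypothetical parsed builder name
  'role': 'Perf',
  'os': 'Ubuntu',
  'compiler': 'GCC',
  'cpu_or_gpu': 'GPU',
  'cpu_or_gpu_value': 'GTX660',
  'is_trybot': False,
}

args = []
# CPU-only bots skip GPU configs and vice versa.
skip_flag = None
if builder_cfg.get('cpu_or_gpu') == 'CPU':
  skip_flag = '--nogpu'
elif builder_cfg.get('cpu_or_gpu') == 'GPU':
  skip_flag = '--nocpu'
if skip_flag:
  args.append(skip_flag)

# Flatten the remaining key/value pairs into the --key list.
keys_blacklist = ['configuration', 'role', 'is_trybot']
args.append('--key')
for k in sorted(builder_cfg.keys()):
  if k not in keys_blacklist:
    args.extend([k, builder_cfg[k]])

print(args)
# ['--nocpu', '--key', 'compiler', 'GCC', 'cpu_or_gpu', 'GPU',
#  'cpu_or_gpu_value', 'GTX660', 'os', 'Ubuntu']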
@@ -7,13 +7,17 @@
 
 
 DEPS = [
+  'build/file',
   'core',
   'recipe_engine/json',
   'recipe_engine/path',
   'recipe_engine/platform',
   'recipe_engine/properties',
+  'recipe_engine/python',
   'recipe_engine/raw_io',
+  'flavor',
   'run',
+  'vars',
 ]
 
 
@@ -33,10 +37,162 @@ TEST_BUILDERS = {
 }
 
 
+def key_params(api):
+  """Build a unique key from the builder name (as a list).
+
+  E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6
+  """
+  # Don't bother to include role, which is always Test.
+  # TryBots are uploaded elsewhere so they can use the same key.
+  blacklist = ['role', 'is_trybot']
+
+  flat = []
+  for k in sorted(api.vars.builder_cfg.keys()):
+    if k not in blacklist:
+      flat.append(k)
+      flat.append(api.vars.builder_cfg[k])
+  return flat
+
+
+def test_steps(api):
+  """Run the DM test."""
+  use_hash_file = False
+  if api.vars.upload_dm_results:
+    # This must run before we write anything into
+    # api.flavor.device_dirs.dm_dir or we may end up deleting our
+    # output on machines where they're the same.
+    api.flavor.create_clean_host_dir(api.vars.dm_dir)
+    host_dm_dir = str(api.vars.dm_dir)
+    device_dm_dir = str(api.flavor.device_dirs.dm_dir)
+    if host_dm_dir != device_dm_dir:
+      api.flavor.create_clean_device_dir(device_dm_dir)
+
+    # Obtain the list of already-generated hashes.
+    hash_filename = 'uninteresting_hashes.txt'
+
+    # Ensure that the tmp_dir exists.
+    api.run.run_once(api.file.makedirs,
+                     'tmp_dir',
+                     api.vars.tmp_dir,
+                     infra_step=True)
+
+    host_hashes_file = api.vars.tmp_dir.join(hash_filename)
+    hashes_file = api.flavor.device_path_join(
+        api.flavor.device_dirs.tmp_dir, hash_filename)
+    api.run(
+        api.python.inline,
+        'get uninteresting hashes',
+        program="""
+        import contextlib
+        import math
+        import socket
+        import sys
+        import time
+        import urllib2
+
+        HASHES_URL = 'https://gold.skia.org/_/hashes'
+        RETRIES = 5
+        TIMEOUT = 60
+        WAIT_BASE = 15
+
+        socket.setdefaulttimeout(TIMEOUT)
+        for retry in range(RETRIES):
+          try:
+            with contextlib.closing(
+                urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
+              hashes = w.read()
+              with open(sys.argv[1], 'w') as f:
+                f.write(hashes)
+                break
+          except Exception as e:
+            print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
+            print e
+            if retry == RETRIES:
+              raise
+            waittime = WAIT_BASE * math.pow(2, retry)
+            print 'Retry in %d seconds.' % waittime
+            time.sleep(waittime)
+        """,
+        args=[host_hashes_file],
+        cwd=api.vars.skia_dir,
+        abort_on_failure=False,
+        fail_build_on_failure=False,
+        infra_step=True)
+
+    if api.path.exists(host_hashes_file):
+      api.flavor.copy_file_to_device(host_hashes_file, hashes_file)
+      use_hash_file = True
+
+  # Run DM.
+  properties = [
+    'gitHash', api.vars.got_revision,
+    'master', api.vars.master_name,
+    'builder', api.vars.builder_name,
+    'build_number', api.vars.build_number,
+  ]
+  if api.vars.is_trybot:
+    properties.extend([
+      'issue', api.vars.issue,
+      'patchset', api.vars.patchset,
+    ])
+
+  args = [
+    'dm',
+    '--undefok',   # This helps branches that may not know new flags.
+    '--resourcePath', api.flavor.device_dirs.resource_dir,
+    '--skps', api.flavor.device_dirs.skp_dir,
+    '--images', api.flavor.device_path_join(
+        api.flavor.device_dirs.images_dir, 'dm'),
+    '--colorImages', api.flavor.device_path_join(
+        api.flavor.device_dirs.images_dir, 'colorspace'),
+    '--nameByHash',
+    '--properties'
+  ] + properties
+
+  args.append('--key')
+  args.extend(key_params(api))
+  if use_hash_file:
+    args.extend(['--uninterestingHashesFile', hashes_file])
+  if api.vars.upload_dm_results:
+    args.extend(['--writePath', api.flavor.device_dirs.dm_dir])
+
+  skip_flag = None
+  if api.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
+    skip_flag = '--nogpu'
+  elif api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
+    skip_flag = '--nocpu'
+  if skip_flag:
+    args.append(skip_flag)
+  args.extend(api.vars.dm_flags)
+
+  api.run(api.flavor.step, 'dm', cmd=args,
+          abort_on_failure=False,
+          env=api.vars.default_env)
+
+  if api.vars.upload_dm_results:
+    # Copy images and JSON to host machine if needed.
+    api.flavor.copy_directory_contents_to_host(
+        api.flavor.device_dirs.dm_dir, api.vars.dm_dir)
+
+  # See skia:2789.
+  if ('Valgrind' in api.vars.builder_name and
+      api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
+    abandonGpuContext = list(args)
+    abandonGpuContext.append('--abandonGpuContext')
+    api.run(api.flavor.step, 'dm --abandonGpuContext',
+            cmd=abandonGpuContext, abort_on_failure=False)
+    preAbandonGpuContext = list(args)
+    preAbandonGpuContext.append('--preAbandonGpuContext')
+    api.run(api.flavor.step, 'dm --preAbandonGpuContext',
+            cmd=preAbandonGpuContext, abort_on_failure=False,
+            env=api.vars.default_env)
+
+
 def RunSteps(api):
   api.core.setup()
-  api.core.test_steps()
-  api.core.cleanup_steps()
+  api.flavor.install()
+  test_steps(api)
+  api.flavor.cleanup_steps()
   api.run.check_failure()
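
The 'get uninteresting hashes' step above retries with exponential backoff; with RETRIES = 5 and WAIT_BASE = 15 the waits between attempts work out to 15, 30, 60, 120 and 240 seconds. A quick check of that schedule in plain Python:

RETRIES = 5
WAIT_BASE = 15
for retry in range(RETRIES):
  # Same formula as the inline program: WAIT_BASE * 2**retry.
  print(WAIT_BASE * 2 ** retry)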