Move most recipes into modules
The modules will eventually move into a shared repository. BUG=skia:6070 Change-Id: I622265d98a5446f9f3ebf2c6a197770398623769 Reviewed-on: https://skia-review.googlesource.com/6942 Reviewed-by: Ravi Mistry <rmistry@google.com> Commit-Queue: Eric Boren <borenet@google.com>
This commit is contained in:
parent
621ea115e0
commit
2b861d3a39
15
infra/bots/recipe_modules/compile/__init__.py
Normal file
15
infra/bots/recipe_modules/compile/__init__.py
Normal file
@ -0,0 +1,15 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Recipe modules this module depends on. Bare names are sibling Skia
# modules; 'recipe_engine/'-prefixed names are built-in engine modules.
# Order is preserved from the original declaration.
DEPS = [
    'core',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/python',
    'flavor',
    'run',
    'vars',
]
|
81
infra/bots/recipe_modules/compile/api.py
Normal file
81
infra/bots/recipe_modules/compile/api.py
Normal file
@ -0,0 +1,81 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
# Recipe module for Skia Swarming compile.
|
||||
|
||||
|
||||
from recipe_engine import recipe_api
|
||||
|
||||
|
||||
def build_targets_from_builder_dict(builder_dict):
  """Return the list of build targets appropriate for this builder.

  iOS builders compile the iOSShell target; every other builder compiles
  the catch-all 'most' target.
  """
  is_ios = builder_dict.get('extra_config') == 'iOS'
  return ['iOSShell'] if is_ios else ['most']
|
||||
|
||||
|
||||
def get_extra_env_vars(builder_dict):
  """Return extra environment variables implied by the builder config."""
  extra_env = {}
  # Clang builders point CC/CXX at the system clang binaries.
  if builder_dict.get('compiler') == 'Clang':
    extra_env.update({'CC': '/usr/bin/clang', 'CXX': '/usr/bin/clang++'})

  # All-caps configs beginning with 'SK' (e.g. SKNX_NO_SIMD,
  # SK_USE_DISCARDABLE_SCALEDIMAGECACHE) are passed straight through as
  # preprocessor defines.
  cfg = builder_dict.get('extra_config', '')
  if cfg.startswith('SK') and cfg.isupper():
    extra_env['CPPFLAGS'] = '-D' + cfg

  return extra_env
|
||||
|
||||
|
||||
def get_gyp_defines(builder_dict):
  """Return the GYP defines dict for this builder.

  Only iOS builders still use GYP; every other builder gets an empty dict.
  """
  if (builder_dict.get('os') == 'iOS' or
      builder_dict.get('extra_config') == 'iOS'):
    return {
        'skia_arch_type': 'arm',
        'skia_clang_build': '1',
        'skia_os': 'ios',
        'skia_warnings_as_errors': 1,
    }
  return {}
|
||||
|
||||
|
||||
class CompileApi(recipe_api.RecipeApi):
  """Recipe module which runs the Skia Swarming compile."""

  def run(self):
    """Sync, compile all targets for this builder, and export the results.

    Build products are copied into the Swarming output directory so
    downstream test/perf tasks can use them. On Windows, leftover compiler
    processes are killed even when the build fails, since they can hold
    file locks and break subsequent tasks.
    """
    self.m.core.setup()

    env = get_extra_env_vars(self.m.vars.builder_cfg)
    gyp_defs = get_gyp_defines(self.m.vars.builder_cfg)
    # Sorted for deterministic step output. Use items() rather than the
    # Python-2-only iteritems() so this works under Python 2 and 3.
    gyp_defs_list = sorted('%s=%s' % (k, v) for k, v in gyp_defs.items())
    env['GYP_DEFINES'] = ' '.join(gyp_defs_list)

    build_targets = build_targets_from_builder_dict(self.m.vars.builder_cfg)

    try:
      for target in build_targets:
        self.m.flavor.compile(target, env=env)
      self.m.run.copy_build_products(
          self.m.flavor.out_dir,
          self.m.vars.swarming_out_dir.join(
              'out', self.m.vars.configuration))
      self.m.flavor.copy_extra_build_products(self.m.vars.swarming_out_dir)
    finally:
      if 'Win' in self.m.vars.builder_cfg.get('os', ''):
        # Kill compiler helper processes which may outlive the build and
        # hold locks on files in the work dir (e.g. mspdbsrv.exe).
        self.m.python.inline(
            name='cleanup',
            program='''import psutil
for p in psutil.process_iter():
  try:
    if p.name in ('mspdbsrv.exe', 'vctip.exe', 'cl.exe', 'link.exe'):
      p.kill()
  except psutil._error.AccessDenied:
    pass
''',
            infra_step=True)

    self.m.flavor.cleanup_steps()
    self.m.run.check_failure()
|
17
infra/bots/recipe_modules/perf/__init__.py
Normal file
17
infra/bots/recipe_modules/perf/__init__.py
Normal file
@ -0,0 +1,17 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Recipe modules this module depends on. Bare names are sibling Skia
# modules; prefixed names come from recipe_engine or the build repo.
# Order is preserved from the original declaration.
DEPS = [
    'build/file',
    'core',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/time',
    'run',
    'flavor',
    'vars',
]
|
217
infra/bots/recipe_modules/perf/api.py
Normal file
217
infra/bots/recipe_modules/perf/api.py
Normal file
@ -0,0 +1,217 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
# Recipe module for Skia Swarming perf.
|
||||
|
||||
|
||||
import calendar
|
||||
|
||||
from recipe_engine import recipe_api
|
||||
|
||||
|
||||
def nanobench_flags(bot):
  """Compute the command-line flags to pass to nanobench for this bot.

  The returned list encodes which configs to benchmark and which
  benchmarks to skip, keyed off substrings of the builder name.
  """
  flags = ['--pre_log']

  if 'GPU' in bot:
    flags.append('--images')
    flags.extend(['--gpuStatsDump', 'true'])

  if 'Android' in bot and 'GPU' in bot:
    flags.extend(['--useThermalManager', '1,1,10,1000'])

  flags.extend(['--scales', '1.0', '1.1'])

  if 'iOS' in bot:
    flags.extend(['--skps', 'ignore_skps'])

  configs = ['8888', 'gpu', 'nonrendering', 'hwui']
  if 'AndroidOne' not in bot:
    configs += ['f16', 'srgb']
  if '-GCE-' in bot:
    configs += ['565']
  # MSAA produces a long error stream on the NexusPlayer; skip it there.
  if 'NexusPlayer' not in bot:
    if 'Android' not in bot:
      configs.extend(['msaa16', 'nvpr16', 'nvprdit16'])
    elif 'NVIDIA_Shield' in bot:
      # The Shield has a regular OpenGL implementation; bench that
      # instead of ES.
      configs.remove('gpu')
      configs.extend(['gl', 'glmsaa4', 'glnvpr4', 'glnvprdit4'])
    else:
      configs.extend(['msaa4', 'nvpr4', 'nvprdit4'])

  # Instanced rendering is benched only on a handful of platforms.
  if 'Nexus6' in bot:
    configs.append('esinst')  # esinst4 isn't working yet on Adreno.
  elif 'PixelC' in bot:
    configs.extend(['esinst', 'esinst4'])
  elif 'NVIDIA_Shield' in bot:
    configs.extend(['glinst', 'glinst4'])
  elif 'MacMini6.2' in bot:
    configs.extend(['glinst', 'glinst16'])

  # CommandBuffer and Vulkan bots bench only their own config.
  if 'CommandBuffer' in bot:
    configs = ['commandbuffer']
  if 'Vulkan' in bot:
    configs = ['vk']

  if 'ANGLE' in bot:
    configs.extend(['angle_d3d11_es2'])
    # The GL backend of ANGLE crashes on the perf bot currently.
    if 'Win' not in bot:
      configs.extend(['angle_gl_es2'])

  flags.append('--config')
  flags.extend(configs)

  if 'Valgrind' in bot:
    # Valgrind performance isn't interesting: run each benchmark once,
    # and keep the bot framework from thinking we've timed out.
    flags.extend(['--loops', '1', '--samples', '1', '--keepAlive', 'true'])

  matches = []
  if 'Android' in bot:
    # Segfaults when run as a GPU bench. Very large texture?
    matches += ['~blurroundrect',
                '~patch_grid',  # skia:2847
                '~desk_carsvg']
  if 'NexusPlayer' in bot:
    matches.append('~desk_unicodetable')
  if 'Nexus5' in bot:
    matches.append('~keymobi_shop_mobileweb_ebay_com.skp')  # skia:5178
  if 'iOS' in bot:
    matches += ['~blurroundrect',
                '~patch_grid',  # skia:2847
                '~desk_carsvg',
                '~keymobi',
                '~path_hairline',
                '~GLInstancedArraysBench']  # skia:4714
  if 'IntelIris540' in bot and 'ANGLE' in bot:
    matches.append('~tile_image_filter_tiled_64')  # skia:6082

  # We do not need or want to benchmark the decodes of incomplete images;
  # in fact nanobench asserts that the full image decode succeeds.
  matches += ['~inc0.gif', '~inc1.gif', '~incInterlaced.gif', '~inc0.jpg',
              '~incGray.jpg', '~inc0.wbmp', '~inc1.wbmp', '~inc0.webp',
              '~inc1.webp', '~inc0.ico', '~inc1.ico', '~inc0.png',
              '~inc1.png', '~inc2.png', '~inc12.png', '~inc13.png',
              '~inc14.png', '~inc0.webp', '~inc1.webp']

  if matches:
    flags.append('--match')
    flags.extend(matches)

  return flags
|
||||
|
||||
|
||||
def perf_steps(api):
  """Run Skia benchmarks (nanobench) and stage results for upload."""
  if api.vars.upload_perf_results:
    # Start from an empty results dir so stale files cannot be uploaded.
    api.flavor.create_clean_device_dir(
        api.flavor.device_dirs.perf_data_dir)

  # Key/value properties recorded into the output JSON.
  props = [
      '--properties',
      'gitHash', api.vars.got_revision,
      'build_number', api.vars.build_number,
  ]
  if api.vars.is_trybot:
    props.extend([
        'issue', api.vars.issue,
        'patchset', api.vars.patchset,
        'patch_storage', api.vars.patch_storage,
    ])
  if api.vars.no_buildbot:
    props.extend(['no_buildbot', 'True'])
    props.extend(['swarming_bot_id', api.vars.swarming_bot_id])
    props.extend(['swarming_task_id', api.vars.swarming_task_id])

  target = 'nanobench'
  cmd = [
      target,
      '--undefok',  # Lets older branches ignore flags they don't know.
      '-i', api.flavor.device_dirs.resource_dir,
      '--skps', api.flavor.device_dirs.skp_dir,
      '--images', api.flavor.device_path_join(
          api.flavor.device_dirs.images_dir, 'nanobench'),
  ]

  # Do not run svgs on Valgrind.
  if 'Valgrind' not in api.vars.builder_name:
    cmd.extend(['--svgs', api.flavor.device_dirs.svg_dir])

  # Skip whichever of CPU/GPU this builder is not configured for.
  cpu_or_gpu = api.vars.builder_cfg.get('cpu_or_gpu')
  if cpu_or_gpu == 'CPU':
    cmd.append('--nogpu')
  elif cpu_or_gpu == 'GPU':
    cmd.append('--nocpu')
  cmd.extend(nanobench_flags(api.vars.builder_name))

  if api.vars.upload_perf_results:
    now = api.time.utcnow()
    ts = int(calendar.timegm(now.utctimetuple()))
    json_path = api.flavor.device_path_join(
        api.flavor.device_dirs.perf_data_dir,
        'nanobench_%s_%d.json' % (api.vars.got_revision, ts))
    cmd.extend(['--outResultsFile', json_path])
    cmd.extend(props)

    # Everything else in builder_cfg becomes part of the Perf key.
    keys_blacklist = ['configuration', 'role', 'is_trybot']
    cmd.append('--key')
    for k in sorted(api.vars.builder_cfg.keys()):
      if k not in keys_blacklist:
        cmd.extend([k, api.vars.builder_cfg[k]])

  api.run(api.flavor.step, target, cmd=cmd,
          abort_on_failure=False,
          env=api.vars.default_env)

  # See skia:2789: on Valgrind GPU bots, re-run with an abandoned GPU
  # context to shake out leaks.
  if ('Valgrind' in api.vars.builder_name and
      api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
    abandoned = list(cmd)
    abandoned.extend(['--abandonGpuContext', '--nocpu'])
    api.run(api.flavor.step,
            '%s --abandonGpuContext' % target,
            cmd=abandoned, abort_on_failure=False,
            env=api.vars.default_env)

  # Copy results to the Swarming out dir for upload.
  if api.vars.upload_perf_results:
    api.file.makedirs('perf_dir', api.vars.perf_data_dir)
    api.flavor.copy_directory_contents_to_host(
        api.flavor.device_dirs.perf_data_dir,
        api.vars.perf_data_dir)
|
||||
|
||||
class PerfApi(recipe_api.RecipeApi):
  """Recipe module entry point for the Skia Swarming perf task."""

  def run(self):
    """Set up the bot, run the benchmarks, and always clean up after."""
    m = self.m
    m.core.setup()
    try:
      m.flavor.install_everything()
      perf_steps(m)
    finally:
      m.flavor.cleanup_steps()
    m.run.check_failure()
|
17
infra/bots/recipe_modules/skpbench/__init__.py
Normal file
17
infra/bots/recipe_modules/skpbench/__init__.py
Normal file
@ -0,0 +1,17 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Recipe modules this module depends on. Bare names are sibling Skia
# modules; prefixed names come from recipe_engine or the build repo.
# Order is preserved from the original declaration.
DEPS = [
    'build/file',
    'core',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/time',
    'run',
    'flavor',
    'vars',
]
|
89
infra/bots/recipe_modules/skpbench/api.py
Normal file
89
infra/bots/recipe_modules/skpbench/api.py
Normal file
@ -0,0 +1,89 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
# Recipe module for Skia Swarming skpbench.
|
||||
|
||||
|
||||
import calendar
|
||||
|
||||
from recipe_engine import recipe_api
|
||||
|
||||
|
||||
def _run(api, title, *cmd, **kwargs):
|
||||
return api.run(api.step, title, cmd=list(cmd),
|
||||
cwd=api.vars.skia_dir, **kwargs)
|
||||
|
||||
|
||||
def _adb(api, title, *cmd, **kwargs):
  """Run an adb command, marked as an infra step unless told otherwise."""
  kwargs.setdefault('infra_step', True)
  return _run(api, title, 'adb', *cmd, **kwargs)
|
||||
|
||||
|
||||
def skpbench_steps(api):
  """Benchmark Skia using skpbench and emit Perf-ingestible JSON."""
  # Push the skpbench binary onto the device.
  app = api.vars.skia_out.join(api.vars.configuration, 'skpbench')
  _adb(api, 'push skpbench', 'push', app, api.vars.android_bin_dir)

  skpbench_dir = api.vars.slave_dir.join('skia', 'tools', 'skpbench')
  table = api.path.join(api.vars.swarming_out_dir, 'table')

  bench_config = 'vk' if 'Vulkan' in api.vars.builder_name else 'gpu,esinst4'

  skpbench_args = [
      api.path.join(api.vars.android_bin_dir, 'skpbench'),
      api.path.join(api.vars.android_data_dir, 'skps'),
      '--adb',
      '--resultsfile', table,
      '--config', bench_config]

  api.run(api.python, 'skpbench',
          script=skpbench_dir.join('skpbench.py'),
          args=skpbench_args)

  # Convert the raw result table into Perf JSON.
  skiaperf_args = [
      table,
      '--properties',
      'gitHash', api.vars.got_revision,
      'build_number', api.vars.build_number,
  ]
  skiaperf_args.extend(['no_buildbot', 'True'])
  skiaperf_args.extend(['swarming_bot_id', api.vars.swarming_bot_id])
  skiaperf_args.extend(['swarming_task_id', api.vars.swarming_task_id])

  timestamp = int(calendar.timegm(api.time.utcnow().utctimetuple()))
  api.file.makedirs('perf_dir', api.vars.perf_data_dir)
  json_path = api.path.join(
      api.vars.perf_data_dir,
      'skpbench_%s_%d.json' % (api.vars.got_revision, timestamp))
  skiaperf_args.extend(['--outfile', json_path])

  # Every builder_cfg entry except these becomes part of the Perf key.
  excluded_keys = ['configuration', 'role', 'is_trybot']
  skiaperf_args.append('--key')
  for key in sorted(api.vars.builder_cfg.keys()):
    if key not in excluded_keys:
      skiaperf_args.extend([key, api.vars.builder_cfg[key]])

  api.run(api.python, 'Parse skpbench output into Perf json',
          script=skpbench_dir.join('skiaperf.py'),
          args=skiaperf_args)
|
||||
|
||||
|
||||
class SkpBenchApi(recipe_api.RecipeApi):
  """Recipe module entry point for the Skia Swarming skpbench task."""

  def run(self):
    """Install the SKPs, run skpbench, and always clean up after."""
    m = self.m
    m.core.setup()
    try:
      m.flavor.install(skps=True)
      skpbench_steps(m)
    finally:
      m.flavor.cleanup_steps()
    m.run.check_failure()
|
17
infra/bots/recipe_modules/sktest/__init__.py
Normal file
17
infra/bots/recipe_modules/sktest/__init__.py
Normal file
@ -0,0 +1,17 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Recipe modules this module depends on. Bare names are sibling Skia
# modules; prefixed names come from recipe_engine or the build repo.
# Order is preserved from the original declaration.
DEPS = [
    'build/file',
    'core',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'flavor',
    'run',
    'vars',
]
|
529
infra/bots/recipe_modules/sktest/api.py
Normal file
529
infra/bots/recipe_modules/sktest/api.py
Normal file
@ -0,0 +1,529 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
# Recipe module for Skia Swarming test.
|
||||
|
||||
|
||||
from recipe_engine import recipe_api
|
||||
|
||||
|
||||
def dm_flags(bot):
|
||||
args = []
|
||||
|
||||
# 32-bit desktop bots tend to run out of memory, because they have relatively
|
||||
# far more cores than RAM (e.g. 32 cores, 3G RAM). Hold them back a bit.
|
||||
if '-x86-' in bot and not 'NexusPlayer' in bot:
|
||||
args.extend('--threads 4'.split(' '))
|
||||
|
||||
# These are the canonical configs that we would ideally run on all bots. We
|
||||
# may opt out or substitute some below for specific bots
|
||||
configs = ['8888', 'srgb', 'gpu', 'gpudft', 'gpusrgb', 'pdf']
|
||||
# Add in either msaa4 or msaa16 to the canonical set of configs to run
|
||||
if 'Android' in bot or 'iOS' in bot:
|
||||
configs.append('msaa4')
|
||||
else:
|
||||
configs.append('msaa16')
|
||||
|
||||
# The NP produces a long error stream when we run with MSAA. The Tegra3 just
|
||||
# doesn't support it.
|
||||
if ('NexusPlayer' in bot or
|
||||
'Tegra3' in bot or
|
||||
# We aren't interested in fixing msaa bugs on iPad4.
|
||||
'iPad4' in bot or
|
||||
# skia:5792
|
||||
'iHD530' in bot or
|
||||
'IntelIris540' in bot):
|
||||
configs = [x for x in configs if 'msaa' not in x]
|
||||
|
||||
# The NP produces different images for dft on every run.
|
||||
if 'NexusPlayer' in bot:
|
||||
configs = [x for x in configs if 'gpudft' not in x]
|
||||
|
||||
# Runs out of memory on Android bots. Everyone else seems fine.
|
||||
if 'Android' in bot:
|
||||
configs.remove('pdf')
|
||||
|
||||
if '-GCE-' in bot:
|
||||
configs.extend(['565'])
|
||||
configs.extend(['f16'])
|
||||
configs.extend(['sp-8888', '2ndpic-8888']) # Test niche uses of SkPicture.
|
||||
configs.extend(['lite-8888']) # Experimental display list.
|
||||
|
||||
if '-TSAN' not in bot:
|
||||
if ('TegraK1' in bot or
|
||||
'TegraX1' in bot or
|
||||
'GTX550Ti' in bot or
|
||||
'GTX660' in bot or
|
||||
'GT610' in bot):
|
||||
if 'Android' in bot:
|
||||
configs.append('nvprdit4')
|
||||
else:
|
||||
configs.append('nvprdit16')
|
||||
|
||||
# We want to test the OpenGL config not the GLES config on the Shield
|
||||
if 'NVIDIA_Shield' in bot:
|
||||
configs = [x.replace('gpu', 'gl') for x in configs]
|
||||
configs = [x.replace('msaa', 'glmsaa') for x in configs]
|
||||
configs = [x.replace('nvpr', 'glnvpr') for x in configs]
|
||||
|
||||
# NP is running out of RAM when we run all these modes. skia:3255
|
||||
if 'NexusPlayer' not in bot:
|
||||
configs.extend(mode + '-8888' for mode in
|
||||
['serialize', 'tiles_rt', 'pic'])
|
||||
|
||||
# Test instanced rendering on a limited number of platforms
|
||||
if 'Nexus6' in bot:
|
||||
configs.append('esinst') # esinst4 isn't working yet on Adreno.
|
||||
elif 'NVIDIA_Shield' in bot:
|
||||
# Multisampled instanced configs use nvpr.
|
||||
configs = [x.replace('glnvpr', 'glinst') for x in configs]
|
||||
configs.append('glinst')
|
||||
elif 'PixelC' in bot:
|
||||
# Multisampled instanced configs use nvpr.
|
||||
configs = [x.replace('nvpr', 'esinst') for x in configs]
|
||||
configs.append('esinst')
|
||||
elif 'MacMini6.2' in bot:
|
||||
configs.extend(['glinst', 'glinst16'])
|
||||
|
||||
# CommandBuffer bot *only* runs the command_buffer config.
|
||||
if 'CommandBuffer' in bot:
|
||||
configs = ['commandbuffer']
|
||||
|
||||
# ANGLE bot *only* runs the angle configs
|
||||
if 'ANGLE' in bot:
|
||||
configs = ['angle_d3d11_es2',
|
||||
'angle_d3d9_es2',
|
||||
'angle_d3d11_es2_msaa4',
|
||||
'angle_gl_es2']
|
||||
|
||||
# Vulkan bot *only* runs the vk config.
|
||||
if 'Vulkan' in bot:
|
||||
configs = ['vk']
|
||||
|
||||
args.append('--config')
|
||||
args.extend(configs)
|
||||
|
||||
# Run tests, gms, and image decoding tests everywhere.
|
||||
args.extend('--src tests gm image colorImage svg'.split(' '))
|
||||
|
||||
if 'GalaxyS' in bot:
|
||||
args.extend(('--threads', '0'))
|
||||
|
||||
blacklisted = []
|
||||
def blacklist(quad):
|
||||
config, src, options, name = quad.split(' ') if type(quad) is str else quad
|
||||
if config == '_' or config in configs:
|
||||
blacklisted.extend([config, src, options, name])
|
||||
|
||||
# TODO: ???
|
||||
blacklist('f16 _ _ dstreadshuffle')
|
||||
blacklist('f16 image _ _')
|
||||
blacklist('gpusrgb image _ _')
|
||||
blacklist('glsrgb image _ _')
|
||||
|
||||
# Decoder tests are now performing gamma correct decodes. This means
|
||||
# that, when viewing the results, we need to perform a gamma correct
|
||||
# encode to PNG. Therefore, we run the image tests in srgb mode instead
|
||||
# of 8888.
|
||||
blacklist('8888 image _ _')
|
||||
|
||||
if 'Valgrind' in bot:
|
||||
# These take 18+ hours to run.
|
||||
blacklist('pdf gm _ fontmgr_iter')
|
||||
blacklist('pdf _ _ PANO_20121023_214540.jpg')
|
||||
blacklist('pdf skp _ worldjournal')
|
||||
blacklist('pdf skp _ desk_baidu.skp')
|
||||
blacklist('pdf skp _ desk_wikipedia.skp')
|
||||
blacklist('_ svg _ _')
|
||||
|
||||
if 'iOS' in bot:
|
||||
blacklist('gpu skp _ _')
|
||||
blacklist('msaa skp _ _')
|
||||
blacklist('msaa16 gm _ tilemodesProcess')
|
||||
|
||||
if 'Mac' in bot or 'iOS' in bot:
|
||||
# CG fails on questionable bmps
|
||||
blacklist('_ image gen_platf rgba32abf.bmp')
|
||||
blacklist('_ image gen_platf rgb24prof.bmp')
|
||||
blacklist('_ image gen_platf rgb24lprof.bmp')
|
||||
blacklist('_ image gen_platf 8bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 4bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 32bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 24bpp-pixeldata-cropped.bmp')
|
||||
|
||||
# CG has unpredictable behavior on this questionable gif
|
||||
# It's probably using uninitialized memory
|
||||
blacklist('_ image gen_platf frame_larger_than_image.gif')
|
||||
|
||||
# CG has unpredictable behavior on incomplete pngs
|
||||
# skbug.com/5774
|
||||
blacklist('_ image gen_platf inc0.png')
|
||||
blacklist('_ image gen_platf inc1.png')
|
||||
blacklist('_ image gen_platf inc2.png')
|
||||
blacklist('_ image gen_platf inc3.png')
|
||||
blacklist('_ image gen_platf inc4.png')
|
||||
blacklist('_ image gen_platf inc5.png')
|
||||
blacklist('_ image gen_platf inc6.png')
|
||||
blacklist('_ image gen_platf inc7.png')
|
||||
blacklist('_ image gen_platf inc8.png')
|
||||
blacklist('_ image gen_platf inc9.png')
|
||||
blacklist('_ image gen_platf inc10.png')
|
||||
blacklist('_ image gen_platf inc11.png')
|
||||
blacklist('_ image gen_platf inc12.png')
|
||||
blacklist('_ image gen_platf inc13.png')
|
||||
blacklist('_ image gen_platf inc14.png')
|
||||
|
||||
# WIC fails on questionable bmps
|
||||
if 'Win' in bot:
|
||||
blacklist('_ image gen_platf rle8-height-negative.bmp')
|
||||
blacklist('_ image gen_platf rle4-height-negative.bmp')
|
||||
blacklist('_ image gen_platf pal8os2v2.bmp')
|
||||
blacklist('_ image gen_platf pal8os2v2-16.bmp')
|
||||
blacklist('_ image gen_platf rgba32abf.bmp')
|
||||
blacklist('_ image gen_platf rgb24prof.bmp')
|
||||
blacklist('_ image gen_platf rgb24lprof.bmp')
|
||||
blacklist('_ image gen_platf 8bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 4bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 32bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 24bpp-pixeldata-cropped.bmp')
|
||||
if 'x86_64' in bot and 'CPU' in bot:
|
||||
# This GM triggers a SkSmallAllocator assert.
|
||||
blacklist('_ gm _ composeshader_bitmap')
|
||||
|
||||
if 'Android' in bot or 'iOS' in bot:
|
||||
# This test crashes the N9 (perhaps because of large malloc/frees). It also
|
||||
# is fairly slow and not platform-specific. So we just disable it on all of
|
||||
# Android and iOS. skia:5438
|
||||
blacklist('_ test _ GrShape')
|
||||
|
||||
if 'Win8' in bot:
|
||||
# bungeman: "Doesn't work on Windows anyway, produces unstable GMs with
|
||||
# 'Unexpected error' from DirectWrite"
|
||||
blacklist('_ gm _ fontscalerdistortable')
|
||||
# skia:5636
|
||||
blacklist('_ svg _ Nebraska-StateSeal.svg')
|
||||
|
||||
# skia:4095
|
||||
bad_serialize_gms = ['bleed_image',
|
||||
'c_gms',
|
||||
'colortype',
|
||||
'colortype_xfermodes',
|
||||
'drawfilter',
|
||||
'fontmgr_bounds_0.75_0',
|
||||
'fontmgr_bounds_1_-0.25',
|
||||
'fontmgr_bounds',
|
||||
'fontmgr_match',
|
||||
'fontmgr_iter',
|
||||
'imagemasksubset']
|
||||
|
||||
# skia:5589
|
||||
bad_serialize_gms.extend(['bitmapfilters',
|
||||
'bitmapshaders',
|
||||
'bleed',
|
||||
'bleed_alpha_bmp',
|
||||
'bleed_alpha_bmp_shader',
|
||||
'convex_poly_clip',
|
||||
'extractalpha',
|
||||
'filterbitmap_checkerboard_32_32_g8',
|
||||
'filterbitmap_image_mandrill_64',
|
||||
'shadows',
|
||||
'simpleaaclip_aaclip'])
|
||||
# skia:5595
|
||||
bad_serialize_gms.extend(['composeshader_bitmap',
|
||||
'scaled_tilemodes_npot',
|
||||
'scaled_tilemodes'])
|
||||
|
||||
# skia:5778
|
||||
bad_serialize_gms.append('typefacerendering_pfaMac')
|
||||
# skia:5942
|
||||
bad_serialize_gms.append('parsedpaths')
|
||||
|
||||
# these use a custom image generator which doesn't serialize
|
||||
bad_serialize_gms.append('ImageGeneratorExternal_rect')
|
||||
bad_serialize_gms.append('ImageGeneratorExternal_shader')
|
||||
|
||||
for test in bad_serialize_gms:
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
if 'Mac' not in bot:
|
||||
for test in ['bleed_alpha_image', 'bleed_alpha_image_shader']:
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
# It looks like we skip these only for out-of-memory concerns.
|
||||
if 'Win' in bot or 'Android' in bot:
|
||||
for test in ['verylargebitmap', 'verylarge_picture_image']:
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
# skia:4769
|
||||
for test in ['drawfilter']:
|
||||
blacklist([ 'sp-8888', 'gm', '_', test])
|
||||
blacklist([ 'pic-8888', 'gm', '_', test])
|
||||
blacklist(['2ndpic-8888', 'gm', '_', test])
|
||||
blacklist([ 'lite-8888', 'gm', '_', test])
|
||||
# skia:4703
|
||||
for test in ['image-cacherator-from-picture',
|
||||
'image-cacherator-from-raster',
|
||||
'image-cacherator-from-ctable']:
|
||||
blacklist([ 'sp-8888', 'gm', '_', test])
|
||||
blacklist([ 'pic-8888', 'gm', '_', test])
|
||||
blacklist([ '2ndpic-8888', 'gm', '_', test])
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
# GM that requires raster-backed canvas
|
||||
for test in ['gamut', 'complexclip4_bw', 'complexclip4_aa']:
|
||||
blacklist([ 'sp-8888', 'gm', '_', test])
|
||||
blacklist([ 'pic-8888', 'gm', '_', test])
|
||||
blacklist([ 'lite-8888', 'gm', '_', test])
|
||||
blacklist([ '2ndpic-8888', 'gm', '_', test])
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
# GM that not support tiles_rt
|
||||
for test in ['complexclip4_bw', 'complexclip4_aa']:
|
||||
blacklist([ 'tiles_rt-8888', 'gm', '_', test])
|
||||
|
||||
# Extensions for RAW images
|
||||
r = ["arw", "cr2", "dng", "nef", "nrw", "orf", "raf", "rw2", "pef", "srw",
|
||||
"ARW", "CR2", "DNG", "NEF", "NRW", "ORF", "RAF", "RW2", "PEF", "SRW"]
|
||||
|
||||
# skbug.com/4888
|
||||
# Blacklist RAW images (and a few large PNGs) on GPU bots
|
||||
# until we can resolve failures
|
||||
if 'GPU' in bot:
|
||||
blacklist('_ image _ interlaced1.png')
|
||||
blacklist('_ image _ interlaced2.png')
|
||||
blacklist('_ image _ interlaced3.png')
|
||||
for raw_ext in r:
|
||||
blacklist('_ image _ .%s' % raw_ext)
|
||||
|
||||
# Large image that overwhelms older Mac bots
|
||||
if 'MacMini4.1-GPU' in bot:
|
||||
blacklist('_ image _ abnormal.wbmp')
|
||||
blacklist(['msaa16', 'gm', '_', 'blurcircles'])
|
||||
|
||||
if 'Nexus5' in bot:
|
||||
# skia:5876
|
||||
blacklist(['msaa4', 'gm', '_', 'encode-platform'])
|
||||
|
||||
match = []
|
||||
if 'Valgrind' in bot: # skia:3021
|
||||
match.append('~Threaded')
|
||||
|
||||
if 'AndroidOne' in bot: # skia:4711
|
||||
match.append('~WritePixels')
|
||||
|
||||
if 'NexusPlayer' in bot:
|
||||
match.append('~ResourceCache')
|
||||
|
||||
if 'Nexus10' in bot:
|
||||
match.append('~CopySurface') # skia:5509
|
||||
match.append('~SRGBReadWritePixels') # skia:6097
|
||||
|
||||
if 'ANGLE' in bot and 'Debug' in bot:
|
||||
match.append('~GLPrograms') # skia:4717
|
||||
|
||||
if 'MSAN' in bot:
|
||||
match.extend(['~Once', '~Shared']) # Not sure what's up with these tests.
|
||||
|
||||
if 'TSAN' in bot:
|
||||
match.extend(['~ReadWriteAlpha']) # Flaky on TSAN-covered on nvidia bots.
|
||||
match.extend(['~RGBA4444TextureTest', # Flakier than they are important.
|
||||
'~RGB565TextureTest'])
|
||||
|
||||
if 'Vulkan' in bot and 'Adreno' in bot:
|
||||
# skia:5777
|
||||
match.extend(['~XfermodeImageFilterCroppedInput',
|
||||
'~GrTextureStripAtlasFlush',
|
||||
'~CopySurface'])
|
||||
|
||||
if 'Vulkan' in bot and 'GTX1070' in bot and 'Win' in bot:
|
||||
# skia:6092
|
||||
match.append('~GPUMemorySize')
|
||||
|
||||
if 'IntelIris540' in bot and 'ANGLE' in bot:
|
||||
match.append('~IntTexture') # skia:6086
|
||||
|
||||
if blacklisted:
|
||||
args.append('--blacklist')
|
||||
args.extend(blacklisted)
|
||||
|
||||
if match:
|
||||
args.append('--match')
|
||||
args.extend(match)
|
||||
|
||||
# These bots run out of memory running RAW codec tests. Do not run them in
|
||||
# parallel
|
||||
if ('NexusPlayer' in bot or 'Nexus5' in bot or 'Nexus9' in bot
|
||||
or 'Win8-MSVC-ShuttleB' in bot):
|
||||
args.append('--noRAW_threading')
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def key_params(api):
  """Build a unique key from the builder config (as a flat list).

  E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6
  """
  # 'role' is always Test, and trybot results are uploaded elsewhere so
  # they can share the same key; neither belongs in it.
  excluded = ('role', 'is_trybot')

  cfg = api.vars.builder_cfg
  flat = []
  for key in sorted(cfg):
    if key in excluded:
      continue
    flat += [key, cfg[key]]
  return flat
|
||||
|
||||
|
||||
def test_steps(api):
|
||||
"""Run the DM test."""
|
||||
use_hash_file = False
|
||||
if api.vars.upload_dm_results:
|
||||
# This must run before we write anything into
|
||||
# api.flavor.device_dirs.dm_dir or we may end up deleting our
|
||||
# output on machines where they're the same.
|
||||
api.flavor.create_clean_host_dir(api.vars.dm_dir)
|
||||
host_dm_dir = str(api.vars.dm_dir)
|
||||
device_dm_dir = str(api.flavor.device_dirs.dm_dir)
|
||||
if host_dm_dir != device_dm_dir:
|
||||
api.flavor.create_clean_device_dir(device_dm_dir)
|
||||
|
||||
# Obtain the list of already-generated hashes.
|
||||
hash_filename = 'uninteresting_hashes.txt'
|
||||
|
||||
# Ensure that the tmp_dir exists.
|
||||
api.run.run_once(api.file.makedirs,
|
||||
'tmp_dir',
|
||||
api.vars.tmp_dir,
|
||||
infra_step=True)
|
||||
|
||||
host_hashes_file = api.vars.tmp_dir.join(hash_filename)
|
||||
hashes_file = api.flavor.device_path_join(
|
||||
api.flavor.device_dirs.tmp_dir, hash_filename)
|
||||
api.run(
|
||||
api.python.inline,
|
||||
'get uninteresting hashes',
|
||||
program="""
|
||||
import contextlib
|
||||
import math
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
import urllib2
|
||||
|
||||
HASHES_URL = 'https://gold.skia.org/_/hashes'
|
||||
RETRIES = 5
|
||||
TIMEOUT = 60
|
||||
WAIT_BASE = 15
|
||||
|
||||
socket.setdefaulttimeout(TIMEOUT)
|
||||
for retry in range(RETRIES):
|
||||
try:
|
||||
with contextlib.closing(
|
||||
urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
|
||||
hashes = w.read()
|
||||
with open(sys.argv[1], 'w') as f:
|
||||
f.write(hashes)
|
||||
break
|
||||
except Exception as e:
|
||||
print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
|
||||
print e
|
||||
if retry == RETRIES:
|
||||
raise
|
||||
waittime = WAIT_BASE * math.pow(2, retry)
|
||||
print 'Retry in %d seconds.' % waittime
|
||||
time.sleep(waittime)
|
||||
""",
|
||||
args=[host_hashes_file],
|
||||
cwd=api.vars.skia_dir,
|
||||
abort_on_failure=False,
|
||||
fail_build_on_failure=False,
|
||||
infra_step=True)
|
||||
|
||||
if api.path.exists(host_hashes_file):
|
||||
api.flavor.copy_file_to_device(host_hashes_file, hashes_file)
|
||||
use_hash_file = True
|
||||
|
||||
# Run DM.
|
||||
properties = [
|
||||
'gitHash', api.vars.got_revision,
|
||||
'master', api.vars.master_name,
|
||||
'builder', api.vars.builder_name,
|
||||
'build_number', api.vars.build_number,
|
||||
]
|
||||
if api.vars.is_trybot:
|
||||
properties.extend([
|
||||
'issue', api.vars.issue,
|
||||
'patchset', api.vars.patchset,
|
||||
'patch_storage', api.vars.patch_storage,
|
||||
])
|
||||
if api.vars.no_buildbot:
|
||||
properties.extend(['no_buildbot', 'True'])
|
||||
properties.extend(['swarming_bot_id', api.vars.swarming_bot_id])
|
||||
properties.extend(['swarming_task_id', api.vars.swarming_task_id])
|
||||
|
||||
args = [
|
||||
'dm',
|
||||
'--undefok', # This helps branches that may not know new flags.
|
||||
'--resourcePath', api.flavor.device_dirs.resource_dir,
|
||||
'--skps', api.flavor.device_dirs.skp_dir,
|
||||
'--images', api.flavor.device_path_join(
|
||||
api.flavor.device_dirs.images_dir, 'dm'),
|
||||
'--colorImages', api.flavor.device_path_join(
|
||||
api.flavor.device_dirs.images_dir, 'colorspace'),
|
||||
'--nameByHash',
|
||||
'--properties'
|
||||
] + properties
|
||||
|
||||
args.extend(['--svgs', api.flavor.device_dirs.svg_dir])
|
||||
|
||||
args.append('--key')
|
||||
args.extend(key_params(api))
|
||||
if use_hash_file:
|
||||
args.extend(['--uninterestingHashesFile', hashes_file])
|
||||
if api.vars.upload_dm_results:
|
||||
args.extend(['--writePath', api.flavor.device_dirs.dm_dir])
|
||||
|
||||
skip_flag = None
|
||||
if api.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
|
||||
skip_flag = '--nogpu'
|
||||
elif api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
|
||||
skip_flag = '--nocpu'
|
||||
if skip_flag:
|
||||
args.append(skip_flag)
|
||||
args.extend(dm_flags(api.vars.builder_name))
|
||||
|
||||
api.run(api.flavor.step, 'dm', cmd=args,
|
||||
abort_on_failure=False,
|
||||
env=api.vars.default_env)
|
||||
|
||||
if api.vars.upload_dm_results:
|
||||
# Copy images and JSON to host machine if needed.
|
||||
api.flavor.copy_directory_contents_to_host(
|
||||
api.flavor.device_dirs.dm_dir, api.vars.dm_dir)
|
||||
|
||||
# See skia:2789.
|
||||
if ('Valgrind' in api.vars.builder_name and
|
||||
api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
|
||||
abandonGpuContext = list(args)
|
||||
abandonGpuContext.append('--abandonGpuContext')
|
||||
api.run(api.flavor.step, 'dm --abandonGpuContext',
|
||||
cmd=abandonGpuContext, abort_on_failure=False)
|
||||
preAbandonGpuContext = list(args)
|
||||
preAbandonGpuContext.append('--preAbandonGpuContext')
|
||||
api.run(api.flavor.step, 'dm --preAbandonGpuContext',
|
||||
cmd=preAbandonGpuContext, abort_on_failure=False,
|
||||
env=api.vars.default_env)
|
||||
|
||||
|
||||
class TestApi(recipe_api.RecipeApi):
|
||||
def run(self):
|
||||
self.m.core.setup()
|
||||
try:
|
||||
self.m.flavor.install_everything()
|
||||
test_steps(self.m)
|
||||
finally:
|
||||
self.m.flavor.cleanup_steps()
|
||||
self.m.run.check_failure()
|
13
infra/bots/recipe_modules/upload_dm_results/__init__.py
Normal file
13
infra/bots/recipe_modules/upload_dm_results/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
DEPS = [
|
||||
'build/file',
|
||||
'recipe_engine/json',
|
||||
'recipe_engine/path',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/shutil',
|
||||
'recipe_engine/step',
|
||||
'recipe_engine/time',
|
||||
]
|
91
infra/bots/recipe_modules/upload_dm_results/api.py
Normal file
91
infra/bots/recipe_modules/upload_dm_results/api.py
Normal file
@ -0,0 +1,91 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
# Recipe for uploading DM results.
|
||||
|
||||
|
||||
|
||||
import calendar
|
||||
|
||||
from recipe_engine import recipe_api
|
||||
|
||||
|
||||
DM_JSON = 'dm.json'
|
||||
GS_BUCKET = 'gs://skia-infra-gm'
|
||||
UPLOAD_ATTEMPTS = 5
|
||||
VERBOSE_LOG = 'verbose.log'
|
||||
|
||||
|
||||
class UploadDmResultsApi(recipe_api.RecipeApi):
|
||||
def cp(self, name, src, dst, extra_args=None):
|
||||
cmd = ['gsutil', 'cp']
|
||||
if extra_args:
|
||||
cmd.extend(extra_args)
|
||||
cmd.extend([src, dst])
|
||||
|
||||
name = 'upload %s' % name
|
||||
for i in xrange(UPLOAD_ATTEMPTS):
|
||||
step_name = name
|
||||
if i > 0:
|
||||
step_name += ' (attempt %d)' % (i+1)
|
||||
try:
|
||||
self.m.step(step_name, cmd=cmd)
|
||||
break
|
||||
except self.m.step.StepFailure:
|
||||
if i == UPLOAD_ATTEMPTS - 1:
|
||||
raise
|
||||
|
||||
def run(self):
|
||||
builder_name = self.m.properties['buildername']
|
||||
revision = self.m.properties['revision']
|
||||
|
||||
results_dir = self.m.path['start_dir'].join('dm')
|
||||
|
||||
# Move dm.json and verbose.log to their own directory.
|
||||
json_file = results_dir.join(DM_JSON)
|
||||
log_file = results_dir.join(VERBOSE_LOG)
|
||||
tmp_dir = self.m.path['start_dir'].join('tmp_upload')
|
||||
self.m.shutil.makedirs('tmp dir', tmp_dir, infra_step=True)
|
||||
self.m.shutil.copy('copy dm.json', json_file, tmp_dir)
|
||||
self.m.shutil.copy('copy verbose.log', log_file, tmp_dir)
|
||||
self.m.shutil.remove('rm old dm.json', json_file)
|
||||
self.m.shutil.remove('rm old verbose.log', log_file)
|
||||
|
||||
# Upload the images.
|
||||
image_dest_path = '/'.join((GS_BUCKET, 'dm-images-v1'))
|
||||
files_to_upload = self.m.file.glob(
|
||||
'find images',
|
||||
results_dir.join('*'),
|
||||
test_data=['someimage.png'],
|
||||
infra_step=True)
|
||||
if len(files_to_upload) > 0:
|
||||
self.cp('images', results_dir.join('*'), image_dest_path)
|
||||
|
||||
# Upload the JSON summary and verbose.log.
|
||||
now = self.m.time.utcnow()
|
||||
summary_dest_path = '/'.join([
|
||||
'dm-json-v1',
|
||||
str(now.year ).zfill(4),
|
||||
str(now.month).zfill(2),
|
||||
str(now.day ).zfill(2),
|
||||
str(now.hour ).zfill(2),
|
||||
revision,
|
||||
builder_name,
|
||||
str(int(calendar.timegm(now.utctimetuple())))])
|
||||
|
||||
# Trybot results are further siloed by issue/patchset.
|
||||
issue = str(self.m.properties.get('issue', ''))
|
||||
patchset = str(self.m.properties.get('patchset', ''))
|
||||
if self.m.properties.get('patch_storage', '') == 'gerrit':
|
||||
issue = str(self.m.properties['patch_issue'])
|
||||
patchset = str(self.m.properties['patch_set'])
|
||||
if issue and patchset:
|
||||
summary_dest_path = '/'.join((
|
||||
'trybot', summary_dest_path, issue, patchset))
|
||||
|
||||
summary_dest_path = '/'.join((GS_BUCKET, summary_dest_path))
|
||||
|
||||
self.cp('JSON and logs', tmp_dir.join('*'), summary_dest_path,
|
||||
['-z', 'json,log'])
|
11
infra/bots/recipe_modules/upload_nano_results/__init__.py
Normal file
11
infra/bots/recipe_modules/upload_nano_results/__init__.py
Normal file
@ -0,0 +1,11 @@
|
||||
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
DEPS = [
|
||||
'build/file',
|
||||
'recipe_engine/path',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/step',
|
||||
'recipe_engine/time',
|
||||
]
|
49
infra/bots/recipe_modules/upload_nano_results/api.py
Normal file
49
infra/bots/recipe_modules/upload_nano_results/api.py
Normal file
@ -0,0 +1,49 @@
|
||||
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
# Recipe for uploading nanobench results.
|
||||
|
||||
|
||||
from recipe_engine import recipe_api
|
||||
|
||||
|
||||
class UploadNanoResultsApi(recipe_api.RecipeApi):
|
||||
def run(self):
|
||||
# Upload the nanobench resuls.
|
||||
builder_name = self.m.properties['buildername']
|
||||
|
||||
now = self.m.time.utcnow()
|
||||
src_path = self.m.path['start_dir'].join(
|
||||
'perfdata', builder_name, 'data')
|
||||
results = self.m.file.glob(
|
||||
'find results',
|
||||
'*.json',
|
||||
cwd=src_path,
|
||||
test_data=['nanobench_abc123.json'],
|
||||
infra_step=True)
|
||||
if len(results) != 1: # pragma: nocover
|
||||
raise Exception('Unable to find nanobench or skpbench JSON file!')
|
||||
|
||||
src = src_path.join(results[0])
|
||||
basename = self.m.path.basename(src)
|
||||
gs_path = '/'.join((
|
||||
'nano-json-v1', str(now.year).zfill(4),
|
||||
str(now.month).zfill(2), str(now.day).zfill(2), str(now.hour).zfill(2),
|
||||
builder_name))
|
||||
|
||||
issue = str(self.m.properties.get('issue', ''))
|
||||
patchset = str(self.m.properties.get('patchset', ''))
|
||||
if self.m.properties.get('patch_storage', '') == 'gerrit':
|
||||
issue = str(self.m.properties['patch_issue'])
|
||||
patchset = str(self.m.properties['patch_set'])
|
||||
if issue and patchset:
|
||||
gs_path = '/'.join(('trybot', gs_path, issue, patchset))
|
||||
|
||||
dst = '/'.join(('gs://skia-perf', gs_path, basename))
|
||||
|
||||
self.m.step(
|
||||
'upload',
|
||||
cmd=['gsutil', 'cp', '-a', 'public-read', '-z', 'json', src, dst],
|
||||
infra_step=True)
|
@ -7,15 +7,10 @@
|
||||
|
||||
|
||||
DEPS = [
|
||||
'core',
|
||||
'recipe_engine/json',
|
||||
'compile',
|
||||
'recipe_engine/path',
|
||||
'recipe_engine/platform',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/python',
|
||||
'flavor',
|
||||
'run',
|
||||
'vars',
|
||||
]
|
||||
|
||||
|
||||
@ -56,75 +51,8 @@ TEST_BUILDERS = {
|
||||
}
|
||||
|
||||
|
||||
def build_targets_from_builder_dict(builder_dict):
|
||||
"""Return a list of targets to build, depending on the builder type."""
|
||||
if builder_dict.get('extra_config') == 'iOS':
|
||||
return ['iOSShell']
|
||||
return ['most']
|
||||
|
||||
|
||||
def get_extra_env_vars(builder_dict):
|
||||
env = {}
|
||||
if builder_dict.get('compiler') == 'Clang':
|
||||
env['CC'] = '/usr/bin/clang'
|
||||
env['CXX'] = '/usr/bin/clang++'
|
||||
|
||||
# SKNX_NO_SIMD, SK_USE_DISCARDABLE_SCALEDIMAGECACHE, etc.
|
||||
extra_config = builder_dict.get('extra_config', '')
|
||||
if extra_config.startswith('SK') and extra_config.isupper():
|
||||
env['CPPFLAGS'] = '-D' + extra_config
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def get_gyp_defines(builder_dict):
|
||||
gyp_defs = {}
|
||||
|
||||
if (builder_dict.get('os') == 'iOS' or
|
||||
builder_dict.get('extra_config') == 'iOS'):
|
||||
gyp_defs['skia_arch_type'] = 'arm'
|
||||
gyp_defs['skia_clang_build'] = '1'
|
||||
gyp_defs['skia_os'] = 'ios'
|
||||
gyp_defs['skia_warnings_as_errors'] = 1
|
||||
|
||||
return gyp_defs
|
||||
|
||||
|
||||
def RunSteps(api):
|
||||
api.core.setup()
|
||||
|
||||
env = get_extra_env_vars(api.vars.builder_cfg)
|
||||
gyp_defs = get_gyp_defines(api.vars.builder_cfg)
|
||||
gyp_defs_list = ['%s=%s' % (k, v) for k, v in gyp_defs.iteritems()]
|
||||
gyp_defs_list.sort()
|
||||
env['GYP_DEFINES'] = ' '.join(gyp_defs_list)
|
||||
|
||||
build_targets = build_targets_from_builder_dict(api.vars.builder_cfg)
|
||||
|
||||
try:
|
||||
for target in build_targets:
|
||||
api.flavor.compile(target, env=env)
|
||||
api.run.copy_build_products(
|
||||
api.flavor.out_dir,
|
||||
api.vars.swarming_out_dir.join(
|
||||
'out', api.vars.configuration))
|
||||
api.flavor.copy_extra_build_products(api.vars.swarming_out_dir)
|
||||
finally:
|
||||
if 'Win' in api.vars.builder_cfg.get('os', ''):
|
||||
api.python.inline(
|
||||
name='cleanup',
|
||||
program='''import psutil
|
||||
for p in psutil.process_iter():
|
||||
try:
|
||||
if p.name in ('mspdbsrv.exe', 'vctip.exe', 'cl.exe', 'link.exe'):
|
||||
p.kill()
|
||||
except psutil._error.AccessDenied:
|
||||
pass
|
||||
''',
|
||||
infra_step=True)
|
||||
|
||||
api.flavor.cleanup_steps()
|
||||
api.run.check_failure()
|
||||
api.compile.run()
|
||||
|
||||
|
||||
def GenTests(api):
|
||||
|
@ -7,17 +7,11 @@
|
||||
|
||||
|
||||
DEPS = [
|
||||
'build/file',
|
||||
'core',
|
||||
'recipe_engine/json',
|
||||
'perf',
|
||||
'recipe_engine/path',
|
||||
'recipe_engine/platform',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/raw_io',
|
||||
'recipe_engine/time',
|
||||
'run',
|
||||
'flavor',
|
||||
'vars',
|
||||
]
|
||||
|
||||
|
||||
@ -47,213 +41,8 @@ TEST_BUILDERS = {
|
||||
}
|
||||
|
||||
|
||||
import calendar
|
||||
|
||||
|
||||
def nanobench_flags(bot):
|
||||
args = ['--pre_log']
|
||||
|
||||
if 'GPU' in bot:
|
||||
args.append('--images')
|
||||
args.extend(['--gpuStatsDump', 'true'])
|
||||
|
||||
if 'Android' in bot and 'GPU' in bot:
|
||||
args.extend(['--useThermalManager', '1,1,10,1000'])
|
||||
|
||||
args.extend(['--scales', '1.0', '1.1'])
|
||||
|
||||
if 'iOS' in bot:
|
||||
args.extend(['--skps', 'ignore_skps'])
|
||||
|
||||
config = ['8888', 'gpu', 'nonrendering', 'hwui' ]
|
||||
if 'AndroidOne' not in bot:
|
||||
config += [ 'f16', 'srgb' ]
|
||||
if '-GCE-' in bot:
|
||||
config += [ '565' ]
|
||||
# The NP produces a long error stream when we run with MSAA.
|
||||
if 'NexusPlayer' not in bot:
|
||||
if 'Android' in bot:
|
||||
# The NVIDIA_Shield has a regular OpenGL implementation. We bench that
|
||||
# instead of ES.
|
||||
if 'NVIDIA_Shield' in bot:
|
||||
config.remove('gpu')
|
||||
config.extend(['gl', 'glmsaa4', 'glnvpr4', 'glnvprdit4'])
|
||||
else:
|
||||
config.extend(['msaa4', 'nvpr4', 'nvprdit4'])
|
||||
else:
|
||||
config.extend(['msaa16', 'nvpr16', 'nvprdit16'])
|
||||
|
||||
# Bench instanced rendering on a limited number of platforms
|
||||
if 'Nexus6' in bot:
|
||||
config.append('esinst') # esinst4 isn't working yet on Adreno.
|
||||
elif 'PixelC' in bot:
|
||||
config.extend(['esinst', 'esinst4'])
|
||||
elif 'NVIDIA_Shield' in bot:
|
||||
config.extend(['glinst', 'glinst4'])
|
||||
elif 'MacMini6.2' in bot:
|
||||
config.extend(['glinst', 'glinst16'])
|
||||
|
||||
if 'CommandBuffer' in bot:
|
||||
config = ['commandbuffer']
|
||||
if 'Vulkan' in bot:
|
||||
config = ['vk']
|
||||
|
||||
if 'ANGLE' in bot:
|
||||
config.extend(['angle_d3d11_es2'])
|
||||
# The GL backend of ANGLE crashes on the perf bot currently.
|
||||
if 'Win' not in bot:
|
||||
config.extend(['angle_gl_es2'])
|
||||
|
||||
args.append('--config')
|
||||
args.extend(config)
|
||||
|
||||
if 'Valgrind' in bot:
|
||||
# Don't care about Valgrind performance.
|
||||
args.extend(['--loops', '1'])
|
||||
args.extend(['--samples', '1'])
|
||||
# Ensure that the bot framework does not think we have timed out.
|
||||
args.extend(['--keepAlive', 'true'])
|
||||
|
||||
match = []
|
||||
if 'Android' in bot:
|
||||
# Segfaults when run as GPU bench. Very large texture?
|
||||
match.append('~blurroundrect')
|
||||
match.append('~patch_grid') # skia:2847
|
||||
match.append('~desk_carsvg')
|
||||
if 'NexusPlayer' in bot:
|
||||
match.append('~desk_unicodetable')
|
||||
if 'Nexus5' in bot:
|
||||
match.append('~keymobi_shop_mobileweb_ebay_com.skp') # skia:5178
|
||||
if 'iOS' in bot:
|
||||
match.append('~blurroundrect')
|
||||
match.append('~patch_grid') # skia:2847
|
||||
match.append('~desk_carsvg')
|
||||
match.append('~keymobi')
|
||||
match.append('~path_hairline')
|
||||
match.append('~GLInstancedArraysBench') # skia:4714
|
||||
if 'IntelIris540' in bot and 'ANGLE' in bot:
|
||||
match.append('~tile_image_filter_tiled_64') # skia:6082
|
||||
|
||||
# We do not need or want to benchmark the decodes of incomplete images.
|
||||
# In fact, in nanobench we assert that the full image decode succeeds.
|
||||
match.append('~inc0.gif')
|
||||
match.append('~inc1.gif')
|
||||
match.append('~incInterlaced.gif')
|
||||
match.append('~inc0.jpg')
|
||||
match.append('~incGray.jpg')
|
||||
match.append('~inc0.wbmp')
|
||||
match.append('~inc1.wbmp')
|
||||
match.append('~inc0.webp')
|
||||
match.append('~inc1.webp')
|
||||
match.append('~inc0.ico')
|
||||
match.append('~inc1.ico')
|
||||
match.append('~inc0.png')
|
||||
match.append('~inc1.png')
|
||||
match.append('~inc2.png')
|
||||
match.append('~inc12.png')
|
||||
match.append('~inc13.png')
|
||||
match.append('~inc14.png')
|
||||
match.append('~inc0.webp')
|
||||
match.append('~inc1.webp')
|
||||
|
||||
if match:
|
||||
args.append('--match')
|
||||
args.extend(match)
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def perf_steps(api):
|
||||
"""Run Skia benchmarks."""
|
||||
if api.vars.upload_perf_results:
|
||||
api.flavor.create_clean_device_dir(
|
||||
api.flavor.device_dirs.perf_data_dir)
|
||||
|
||||
# Run nanobench.
|
||||
properties = [
|
||||
'--properties',
|
||||
'gitHash', api.vars.got_revision,
|
||||
'build_number', api.vars.build_number,
|
||||
]
|
||||
if api.vars.is_trybot:
|
||||
properties.extend([
|
||||
'issue', api.vars.issue,
|
||||
'patchset', api.vars.patchset,
|
||||
'patch_storage', api.vars.patch_storage,
|
||||
])
|
||||
if api.vars.no_buildbot:
|
||||
properties.extend(['no_buildbot', 'True'])
|
||||
properties.extend(['swarming_bot_id', api.vars.swarming_bot_id])
|
||||
properties.extend(['swarming_task_id', api.vars.swarming_task_id])
|
||||
|
||||
target = 'nanobench'
|
||||
args = [
|
||||
target,
|
||||
'--undefok', # This helps branches that may not know new flags.
|
||||
'-i', api.flavor.device_dirs.resource_dir,
|
||||
'--skps', api.flavor.device_dirs.skp_dir,
|
||||
'--images', api.flavor.device_path_join(
|
||||
api.flavor.device_dirs.images_dir, 'nanobench'),
|
||||
]
|
||||
|
||||
# Do not run svgs on Valgrind.
|
||||
if 'Valgrind' not in api.vars.builder_name:
|
||||
args.extend(['--svgs', api.flavor.device_dirs.svg_dir])
|
||||
|
||||
skip_flag = None
|
||||
if api.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
|
||||
skip_flag = '--nogpu'
|
||||
elif api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
|
||||
skip_flag = '--nocpu'
|
||||
if skip_flag:
|
||||
args.append(skip_flag)
|
||||
args.extend(nanobench_flags(api.vars.builder_name))
|
||||
|
||||
if api.vars.upload_perf_results:
|
||||
now = api.time.utcnow()
|
||||
ts = int(calendar.timegm(now.utctimetuple()))
|
||||
json_path = api.flavor.device_path_join(
|
||||
api.flavor.device_dirs.perf_data_dir,
|
||||
'nanobench_%s_%d.json' % (api.vars.got_revision, ts))
|
||||
args.extend(['--outResultsFile', json_path])
|
||||
args.extend(properties)
|
||||
|
||||
keys_blacklist = ['configuration', 'role', 'is_trybot']
|
||||
args.append('--key')
|
||||
for k in sorted(api.vars.builder_cfg.keys()):
|
||||
if not k in keys_blacklist:
|
||||
args.extend([k, api.vars.builder_cfg[k]])
|
||||
|
||||
api.run(api.flavor.step, target, cmd=args,
|
||||
abort_on_failure=False,
|
||||
env=api.vars.default_env)
|
||||
|
||||
# See skia:2789.
|
||||
if ('Valgrind' in api.vars.builder_name and
|
||||
api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
|
||||
abandonGpuContext = list(args)
|
||||
abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
|
||||
api.run(api.flavor.step,
|
||||
'%s --abandonGpuContext' % target,
|
||||
cmd=abandonGpuContext, abort_on_failure=False,
|
||||
env=api.vars.default_env)
|
||||
|
||||
# Copy results to swarming out dir.
|
||||
if api.vars.upload_perf_results:
|
||||
api.file.makedirs('perf_dir', api.vars.perf_data_dir)
|
||||
api.flavor.copy_directory_contents_to_host(
|
||||
api.flavor.device_dirs.perf_data_dir,
|
||||
api.vars.perf_data_dir)
|
||||
|
||||
|
||||
def RunSteps(api):
|
||||
api.core.setup()
|
||||
try:
|
||||
api.flavor.install_everything()
|
||||
perf_steps(api)
|
||||
finally:
|
||||
api.flavor.cleanup_steps()
|
||||
api.run.check_failure()
|
||||
api.perf.run()
|
||||
|
||||
|
||||
def GenTests(api):
|
||||
|
@ -7,17 +7,10 @@
|
||||
|
||||
|
||||
DEPS = [
|
||||
'build/file',
|
||||
'core',
|
||||
'recipe_engine/path',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/python',
|
||||
'recipe_engine/raw_io',
|
||||
'recipe_engine/step',
|
||||
'recipe_engine/time',
|
||||
'run',
|
||||
'flavor',
|
||||
'vars',
|
||||
'skpbench',
|
||||
]
|
||||
|
||||
|
||||
@ -32,84 +25,8 @@ TEST_BUILDERS = {
|
||||
}
|
||||
|
||||
|
||||
import calendar
|
||||
|
||||
|
||||
def _run(api, title, *cmd, **kwargs):
|
||||
return api.run(api.step, title, cmd=list(cmd),
|
||||
cwd=api.vars.skia_dir, **kwargs)
|
||||
|
||||
|
||||
def _adb(api, title, *cmd, **kwargs):
|
||||
if 'infra_step' not in kwargs:
|
||||
kwargs['infra_step'] = True
|
||||
return _run(api, title, 'adb', *cmd, **kwargs)
|
||||
|
||||
|
||||
def skpbench_steps(api):
|
||||
"""benchmark Skia using skpbench."""
|
||||
app = api.vars.skia_out.join(api.vars.configuration, 'skpbench')
|
||||
_adb(api, 'push skpbench', 'push', app, api.vars.android_bin_dir)
|
||||
|
||||
skpbench_dir = api.vars.slave_dir.join('skia', 'tools', 'skpbench')
|
||||
table = api.path.join(api.vars.swarming_out_dir, 'table')
|
||||
|
||||
config = 'gpu,esinst4'
|
||||
if 'Vulkan' in api.vars.builder_name:
|
||||
config = 'vk'
|
||||
|
||||
skpbench_args = [
|
||||
api.path.join(api.vars.android_bin_dir, 'skpbench'),
|
||||
api.path.join(api.vars.android_data_dir, 'skps'),
|
||||
'--adb',
|
||||
'--resultsfile', table,
|
||||
'--config', config]
|
||||
|
||||
api.run(api.python, 'skpbench',
|
||||
script=skpbench_dir.join('skpbench.py'),
|
||||
args=skpbench_args)
|
||||
|
||||
skiaperf_args = [
|
||||
table,
|
||||
'--properties',
|
||||
'gitHash', api.vars.got_revision,
|
||||
'build_number', api.vars.build_number,
|
||||
]
|
||||
|
||||
skiaperf_args.extend(['no_buildbot', 'True'])
|
||||
skiaperf_args.extend(['swarming_bot_id', api.vars.swarming_bot_id])
|
||||
skiaperf_args.extend(['swarming_task_id', api.vars.swarming_task_id])
|
||||
|
||||
now = api.time.utcnow()
|
||||
ts = int(calendar.timegm(now.utctimetuple()))
|
||||
api.file.makedirs('perf_dir', api.vars.perf_data_dir)
|
||||
json_path = api.path.join(
|
||||
api.vars.perf_data_dir,
|
||||
'skpbench_%s_%d.json' % (api.vars.got_revision, ts))
|
||||
|
||||
skiaperf_args.extend([
|
||||
'--outfile', json_path
|
||||
])
|
||||
|
||||
keys_blacklist = ['configuration', 'role', 'is_trybot']
|
||||
skiaperf_args.append('--key')
|
||||
for k in sorted(api.vars.builder_cfg.keys()):
|
||||
if not k in keys_blacklist:
|
||||
skiaperf_args.extend([k, api.vars.builder_cfg[k]])
|
||||
|
||||
api.run(api.python, 'Parse skpbench output into Perf json',
|
||||
script=skpbench_dir.join('skiaperf.py'),
|
||||
args=skiaperf_args)
|
||||
|
||||
|
||||
def RunSteps(api):
|
||||
api.core.setup()
|
||||
try:
|
||||
api.flavor.install(skps=True)
|
||||
skpbench_steps(api)
|
||||
finally:
|
||||
api.flavor.cleanup_steps()
|
||||
api.run.check_failure()
|
||||
api.skpbench.run()
|
||||
|
||||
|
||||
def GenTests(api):
|
||||
@ -137,4 +54,3 @@ def GenTests(api):
|
||||
)
|
||||
|
||||
yield test
|
||||
|
||||
|
@ -7,17 +7,11 @@
|
||||
|
||||
|
||||
DEPS = [
|
||||
'build/file',
|
||||
'core',
|
||||
'recipe_engine/json',
|
||||
'recipe_engine/path',
|
||||
'recipe_engine/platform',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/python',
|
||||
'recipe_engine/raw_io',
|
||||
'flavor',
|
||||
'run',
|
||||
'vars',
|
||||
'sktest',
|
||||
]
|
||||
|
||||
|
||||
@ -56,523 +50,8 @@ TEST_BUILDERS = {
|
||||
}
|
||||
|
||||
|
||||
def dm_flags(bot):
|
||||
args = []
|
||||
|
||||
# 32-bit desktop bots tend to run out of memory, because they have relatively
|
||||
# far more cores than RAM (e.g. 32 cores, 3G RAM). Hold them back a bit.
|
||||
if '-x86-' in bot and not 'NexusPlayer' in bot:
|
||||
args.extend('--threads 4'.split(' '))
|
||||
|
||||
# These are the canonical configs that we would ideally run on all bots. We
|
||||
# may opt out or substitute some below for specific bots
|
||||
configs = ['8888', 'srgb', 'gpu', 'gpudft', 'gpusrgb', 'pdf']
|
||||
# Add in either msaa4 or msaa16 to the canonical set of configs to run
|
||||
if 'Android' in bot or 'iOS' in bot:
|
||||
configs.append('msaa4')
|
||||
else:
|
||||
configs.append('msaa16')
|
||||
|
||||
# The NP produces a long error stream when we run with MSAA. The Tegra3 just
|
||||
# doesn't support it.
|
||||
if ('NexusPlayer' in bot or
|
||||
'Tegra3' in bot or
|
||||
# We aren't interested in fixing msaa bugs on iPad4.
|
||||
'iPad4' in bot or
|
||||
# skia:5792
|
||||
'iHD530' in bot or
|
||||
'IntelIris540' in bot):
|
||||
configs = [x for x in configs if 'msaa' not in x]
|
||||
|
||||
# The NP produces different images for dft on every run.
|
||||
if 'NexusPlayer' in bot:
|
||||
configs = [x for x in configs if 'gpudft' not in x]
|
||||
|
||||
# Runs out of memory on Android bots. Everyone else seems fine.
|
||||
if 'Android' in bot:
|
||||
configs.remove('pdf')
|
||||
|
||||
if '-GCE-' in bot:
|
||||
configs.extend(['565'])
|
||||
configs.extend(['f16'])
|
||||
configs.extend(['sp-8888', '2ndpic-8888']) # Test niche uses of SkPicture.
|
||||
configs.extend(['lite-8888']) # Experimental display list.
|
||||
|
||||
if '-TSAN' not in bot:
|
||||
if ('TegraK1' in bot or
|
||||
'TegraX1' in bot or
|
||||
'GTX550Ti' in bot or
|
||||
'GTX660' in bot or
|
||||
'GT610' in bot):
|
||||
if 'Android' in bot:
|
||||
configs.append('nvprdit4')
|
||||
else:
|
||||
configs.append('nvprdit16')
|
||||
|
||||
# We want to test the OpenGL config not the GLES config on the Shield
|
||||
if 'NVIDIA_Shield' in bot:
|
||||
configs = [x.replace('gpu', 'gl') for x in configs]
|
||||
configs = [x.replace('msaa', 'glmsaa') for x in configs]
|
||||
configs = [x.replace('nvpr', 'glnvpr') for x in configs]
|
||||
|
||||
# NP is running out of RAM when we run all these modes. skia:3255
|
||||
if 'NexusPlayer' not in bot:
|
||||
configs.extend(mode + '-8888' for mode in
|
||||
['serialize', 'tiles_rt', 'pic'])
|
||||
|
||||
# Test instanced rendering on a limited number of platforms
|
||||
if 'Nexus6' in bot:
|
||||
configs.append('esinst') # esinst4 isn't working yet on Adreno.
|
||||
elif 'NVIDIA_Shield' in bot:
|
||||
# Multisampled instanced configs use nvpr.
|
||||
configs = [x.replace('glnvpr', 'glinst') for x in configs]
|
||||
configs.append('glinst')
|
||||
elif 'PixelC' in bot:
|
||||
# Multisampled instanced configs use nvpr.
|
||||
configs = [x.replace('nvpr', 'esinst') for x in configs]
|
||||
configs.append('esinst')
|
||||
elif 'MacMini6.2' in bot:
|
||||
configs.extend(['glinst', 'glinst16'])
|
||||
|
||||
# CommandBuffer bot *only* runs the command_buffer config.
|
||||
if 'CommandBuffer' in bot:
|
||||
configs = ['commandbuffer']
|
||||
|
||||
# ANGLE bot *only* runs the angle configs
|
||||
if 'ANGLE' in bot:
|
||||
configs = ['angle_d3d11_es2',
|
||||
'angle_d3d9_es2',
|
||||
'angle_d3d11_es2_msaa4',
|
||||
'angle_gl_es2']
|
||||
|
||||
# Vulkan bot *only* runs the vk config.
|
||||
if 'Vulkan' in bot:
|
||||
configs = ['vk']
|
||||
|
||||
args.append('--config')
|
||||
args.extend(configs)
|
||||
|
||||
# Run tests, gms, and image decoding tests everywhere.
|
||||
args.extend('--src tests gm image colorImage svg'.split(' '))
|
||||
|
||||
if 'GalaxyS' in bot:
|
||||
args.extend(('--threads', '0'))
|
||||
|
||||
blacklisted = []
|
||||
def blacklist(quad):
|
||||
config, src, options, name = quad.split(' ') if type(quad) is str else quad
|
||||
if config == '_' or config in configs:
|
||||
blacklisted.extend([config, src, options, name])
|
||||
|
||||
# TODO: ???
|
||||
blacklist('f16 _ _ dstreadshuffle')
|
||||
blacklist('f16 image _ _')
|
||||
blacklist('gpusrgb image _ _')
|
||||
blacklist('glsrgb image _ _')
|
||||
|
||||
# Decoder tests are now performing gamma correct decodes. This means
|
||||
# that, when viewing the results, we need to perform a gamma correct
|
||||
# encode to PNG. Therefore, we run the image tests in srgb mode instead
|
||||
# of 8888.
|
||||
blacklist('8888 image _ _')
|
||||
|
||||
if 'Valgrind' in bot:
|
||||
# These take 18+ hours to run.
|
||||
blacklist('pdf gm _ fontmgr_iter')
|
||||
blacklist('pdf _ _ PANO_20121023_214540.jpg')
|
||||
blacklist('pdf skp _ worldjournal')
|
||||
blacklist('pdf skp _ desk_baidu.skp')
|
||||
blacklist('pdf skp _ desk_wikipedia.skp')
|
||||
blacklist('_ svg _ _')
|
||||
|
||||
if 'iOS' in bot:
|
||||
blacklist('gpu skp _ _')
|
||||
blacklist('msaa skp _ _')
|
||||
blacklist('msaa16 gm _ tilemodesProcess')
|
||||
|
||||
if 'Mac' in bot or 'iOS' in bot:
|
||||
# CG fails on questionable bmps
|
||||
blacklist('_ image gen_platf rgba32abf.bmp')
|
||||
blacklist('_ image gen_platf rgb24prof.bmp')
|
||||
blacklist('_ image gen_platf rgb24lprof.bmp')
|
||||
blacklist('_ image gen_platf 8bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 4bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 32bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 24bpp-pixeldata-cropped.bmp')
|
||||
|
||||
# CG has unpredictable behavior on this questionable gif
|
||||
# It's probably using uninitialized memory
|
||||
blacklist('_ image gen_platf frame_larger_than_image.gif')
|
||||
|
||||
# CG has unpredictable behavior on incomplete pngs
|
||||
# skbug.com/5774
|
||||
blacklist('_ image gen_platf inc0.png')
|
||||
blacklist('_ image gen_platf inc1.png')
|
||||
blacklist('_ image gen_platf inc2.png')
|
||||
blacklist('_ image gen_platf inc3.png')
|
||||
blacklist('_ image gen_platf inc4.png')
|
||||
blacklist('_ image gen_platf inc5.png')
|
||||
blacklist('_ image gen_platf inc6.png')
|
||||
blacklist('_ image gen_platf inc7.png')
|
||||
blacklist('_ image gen_platf inc8.png')
|
||||
blacklist('_ image gen_platf inc9.png')
|
||||
blacklist('_ image gen_platf inc10.png')
|
||||
blacklist('_ image gen_platf inc11.png')
|
||||
blacklist('_ image gen_platf inc12.png')
|
||||
blacklist('_ image gen_platf inc13.png')
|
||||
blacklist('_ image gen_platf inc14.png')
|
||||
|
||||
# WIC fails on questionable bmps
|
||||
if 'Win' in bot:
|
||||
blacklist('_ image gen_platf rle8-height-negative.bmp')
|
||||
blacklist('_ image gen_platf rle4-height-negative.bmp')
|
||||
blacklist('_ image gen_platf pal8os2v2.bmp')
|
||||
blacklist('_ image gen_platf pal8os2v2-16.bmp')
|
||||
blacklist('_ image gen_platf rgba32abf.bmp')
|
||||
blacklist('_ image gen_platf rgb24prof.bmp')
|
||||
blacklist('_ image gen_platf rgb24lprof.bmp')
|
||||
blacklist('_ image gen_platf 8bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 4bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 32bpp-pixeldata-cropped.bmp')
|
||||
blacklist('_ image gen_platf 24bpp-pixeldata-cropped.bmp')
|
||||
if 'x86_64' in bot and 'CPU' in bot:
|
||||
# This GM triggers a SkSmallAllocator assert.
|
||||
blacklist('_ gm _ composeshader_bitmap')
|
||||
|
||||
if 'Android' in bot or 'iOS' in bot:
|
||||
# This test crashes the N9 (perhaps because of large malloc/frees). It also
|
||||
# is fairly slow and not platform-specific. So we just disable it on all of
|
||||
# Android and iOS. skia:5438
|
||||
blacklist('_ test _ GrShape')
|
||||
|
||||
if 'Win8' in bot:
|
||||
# bungeman: "Doesn't work on Windows anyway, produces unstable GMs with
|
||||
# 'Unexpected error' from DirectWrite"
|
||||
blacklist('_ gm _ fontscalerdistortable')
|
||||
# skia:5636
|
||||
blacklist('_ svg _ Nebraska-StateSeal.svg')
|
||||
|
||||
# skia:4095
|
||||
bad_serialize_gms = ['bleed_image',
|
||||
'c_gms',
|
||||
'colortype',
|
||||
'colortype_xfermodes',
|
||||
'drawfilter',
|
||||
'fontmgr_bounds_0.75_0',
|
||||
'fontmgr_bounds_1_-0.25',
|
||||
'fontmgr_bounds',
|
||||
'fontmgr_match',
|
||||
'fontmgr_iter',
|
||||
'imagemasksubset']
|
||||
|
||||
# skia:5589
|
||||
bad_serialize_gms.extend(['bitmapfilters',
|
||||
'bitmapshaders',
|
||||
'bleed',
|
||||
'bleed_alpha_bmp',
|
||||
'bleed_alpha_bmp_shader',
|
||||
'convex_poly_clip',
|
||||
'extractalpha',
|
||||
'filterbitmap_checkerboard_32_32_g8',
|
||||
'filterbitmap_image_mandrill_64',
|
||||
'shadows',
|
||||
'simpleaaclip_aaclip'])
|
||||
# skia:5595
|
||||
bad_serialize_gms.extend(['composeshader_bitmap',
|
||||
'scaled_tilemodes_npot',
|
||||
'scaled_tilemodes'])
|
||||
|
||||
# skia:5778
|
||||
bad_serialize_gms.append('typefacerendering_pfaMac')
|
||||
# skia:5942
|
||||
bad_serialize_gms.append('parsedpaths')
|
||||
|
||||
# these use a custom image generator which doesn't serialize
|
||||
bad_serialize_gms.append('ImageGeneratorExternal_rect')
|
||||
bad_serialize_gms.append('ImageGeneratorExternal_shader')
|
||||
|
||||
for test in bad_serialize_gms:
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
if 'Mac' not in bot:
|
||||
for test in ['bleed_alpha_image', 'bleed_alpha_image_shader']:
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
# It looks like we skip these only for out-of-memory concerns.
|
||||
if 'Win' in bot or 'Android' in bot:
|
||||
for test in ['verylargebitmap', 'verylarge_picture_image']:
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
# skia:4769
|
||||
for test in ['drawfilter']:
|
||||
blacklist([ 'sp-8888', 'gm', '_', test])
|
||||
blacklist([ 'pic-8888', 'gm', '_', test])
|
||||
blacklist(['2ndpic-8888', 'gm', '_', test])
|
||||
blacklist([ 'lite-8888', 'gm', '_', test])
|
||||
# skia:4703
|
||||
for test in ['image-cacherator-from-picture',
|
||||
'image-cacherator-from-raster',
|
||||
'image-cacherator-from-ctable']:
|
||||
blacklist([ 'sp-8888', 'gm', '_', test])
|
||||
blacklist([ 'pic-8888', 'gm', '_', test])
|
||||
blacklist([ '2ndpic-8888', 'gm', '_', test])
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
# GM that requires raster-backed canvas
|
||||
for test in ['gamut', 'complexclip4_bw', 'complexclip4_aa']:
|
||||
blacklist([ 'sp-8888', 'gm', '_', test])
|
||||
blacklist([ 'pic-8888', 'gm', '_', test])
|
||||
blacklist([ 'lite-8888', 'gm', '_', test])
|
||||
blacklist([ '2ndpic-8888', 'gm', '_', test])
|
||||
blacklist(['serialize-8888', 'gm', '_', test])
|
||||
|
||||
# GM that not support tiles_rt
|
||||
for test in ['complexclip4_bw', 'complexclip4_aa']:
|
||||
blacklist([ 'tiles_rt-8888', 'gm', '_', test])
|
||||
|
||||
# Extensions for RAW images
|
||||
r = ["arw", "cr2", "dng", "nef", "nrw", "orf", "raf", "rw2", "pef", "srw",
|
||||
"ARW", "CR2", "DNG", "NEF", "NRW", "ORF", "RAF", "RW2", "PEF", "SRW"]
|
||||
|
||||
# skbug.com/4888
|
||||
# Blacklist RAW images (and a few large PNGs) on GPU bots
|
||||
# until we can resolve failures
|
||||
if 'GPU' in bot:
|
||||
blacklist('_ image _ interlaced1.png')
|
||||
blacklist('_ image _ interlaced2.png')
|
||||
blacklist('_ image _ interlaced3.png')
|
||||
for raw_ext in r:
|
||||
blacklist('_ image _ .%s' % raw_ext)
|
||||
|
||||
# Large image that overwhelms older Mac bots
|
||||
if 'MacMini4.1-GPU' in bot:
|
||||
blacklist('_ image _ abnormal.wbmp')
|
||||
blacklist(['msaa16', 'gm', '_', 'blurcircles'])
|
||||
|
||||
if 'Nexus5' in bot:
|
||||
# skia:5876
|
||||
blacklist(['msaa4', 'gm', '_', 'encode-platform'])
|
||||
|
||||
match = []
|
||||
if 'Valgrind' in bot: # skia:3021
|
||||
match.append('~Threaded')
|
||||
|
||||
if 'AndroidOne' in bot: # skia:4711
|
||||
match.append('~WritePixels')
|
||||
|
||||
if 'NexusPlayer' in bot:
|
||||
match.append('~ResourceCache')
|
||||
|
||||
if 'Nexus10' in bot:
|
||||
match.append('~CopySurface') # skia:5509
|
||||
match.append('~SRGBReadWritePixels') # skia:6097
|
||||
|
||||
if 'ANGLE' in bot and 'Debug' in bot:
|
||||
match.append('~GLPrograms') # skia:4717
|
||||
|
||||
if 'MSAN' in bot:
|
||||
match.extend(['~Once', '~Shared']) # Not sure what's up with these tests.
|
||||
|
||||
if 'TSAN' in bot:
|
||||
match.extend(['~ReadWriteAlpha']) # Flaky on TSAN-covered on nvidia bots.
|
||||
match.extend(['~RGBA4444TextureTest', # Flakier than they are important.
|
||||
'~RGB565TextureTest'])
|
||||
|
||||
if 'Vulkan' in bot and 'Adreno' in bot:
|
||||
# skia:5777
|
||||
match.extend(['~XfermodeImageFilterCroppedInput',
|
||||
'~GrTextureStripAtlasFlush',
|
||||
'~CopySurface'])
|
||||
|
||||
if 'Vulkan' in bot and 'GTX1070' in bot and 'Win' in bot:
|
||||
# skia:6092
|
||||
match.append('~GPUMemorySize')
|
||||
|
||||
if 'IntelIris540' in bot and 'ANGLE' in bot:
|
||||
match.append('~IntTexture') # skia:6086
|
||||
|
||||
if blacklisted:
|
||||
args.append('--blacklist')
|
||||
args.extend(blacklisted)
|
||||
|
||||
if match:
|
||||
args.append('--match')
|
||||
args.extend(match)
|
||||
|
||||
# These bots run out of memory running RAW codec tests. Do not run them in
|
||||
# parallel
|
||||
if ('NexusPlayer' in bot or 'Nexus5' in bot or 'Nexus9' in bot
|
||||
or 'Win8-MSVC-ShuttleB' in bot):
|
||||
args.append('--noRAW_threading')
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def key_params(api):
  """Build a unique key from the builder name (as a list).

  E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6

  Returns:
    Flat list of alternating key/value strings drawn from
    api.vars.builder_cfg, sorted by key.
  """
  # Role is always Test, and trybot results are uploaded elsewhere so they
  # can share a key with the main bots; neither belongs in the key.
  excluded = ('role', 'is_trybot')

  cfg = api.vars.builder_cfg
  pairs = []
  for key in sorted(cfg):
    if key in excluded:
      continue
    pairs.extend((key, cfg[key]))
  return pairs
|
||||
|
||||
|
||||
def test_steps(api):
  """Run the DM test.

  Assembles the DM command line (configs, blacklists, key, properties) and
  runs it via the flavor module, optionally downloading known-uninteresting
  hashes first and copying results back to the host for upload.
  """
  use_hash_file = False
  if api.vars.upload_dm_results:
    # This must run before we write anything into
    # api.flavor.device_dirs.dm_dir or we may end up deleting our
    # output on machines where they're the same.
    api.flavor.create_clean_host_dir(api.vars.dm_dir)
    host_dm_dir = str(api.vars.dm_dir)
    device_dm_dir = str(api.flavor.device_dirs.dm_dir)
    if host_dm_dir != device_dm_dir:
      api.flavor.create_clean_device_dir(device_dm_dir)

    # Obtain the list of already-generated hashes.
    hash_filename = 'uninteresting_hashes.txt'

    # Ensure that the tmp_dir exists.
    api.run.run_once(api.file.makedirs,
                     'tmp_dir',
                     api.vars.tmp_dir,
                     infra_step=True)

    host_hashes_file = api.vars.tmp_dir.join(hash_filename)
    hashes_file = api.flavor.device_path_join(
        api.flavor.device_dirs.tmp_dir, hash_filename)
    # Fetch hashes Gold already knows about; best-effort — a failure here
    # neither aborts nor fails the build (abort_on_failure=False,
    # fail_build_on_failure=False below).
    api.run(
        api.python.inline,
        'get uninteresting hashes',
        program="""
        import contextlib
        import math
        import socket
        import sys
        import time
        import urllib2

        HASHES_URL = 'https://gold.skia.org/_/hashes'
        RETRIES = 5
        TIMEOUT = 60
        WAIT_BASE = 15

        socket.setdefaulttimeout(TIMEOUT)
        for retry in range(RETRIES):
          try:
            with contextlib.closing(
                urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
              hashes = w.read()
              with open(sys.argv[1], 'w') as f:
                f.write(hashes)
                break
          except Exception as e:
            print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
            print e
            if retry == RETRIES:
              raise
            waittime = WAIT_BASE * math.pow(2, retry)
            print 'Retry in %d seconds.' % waittime
            time.sleep(waittime)
        """,
        args=[host_hashes_file],
        cwd=api.vars.skia_dir,
        abort_on_failure=False,
        fail_build_on_failure=False,
        infra_step=True)

    if api.path.exists(host_hashes_file):
      api.flavor.copy_file_to_device(host_hashes_file, hashes_file)
      use_hash_file = True

  # Run DM.
  # Alternating key/value strings passed to DM via --properties.
  properties = [
    'gitHash', api.vars.got_revision,
    'master', api.vars.master_name,
    'builder', api.vars.builder_name,
    'build_number', api.vars.build_number,
  ]
  if api.vars.is_trybot:
    properties.extend([
      'issue', api.vars.issue,
      'patchset', api.vars.patchset,
      'patch_storage', api.vars.patch_storage,
    ])
  if api.vars.no_buildbot:
    # NOTE(review): indentation reconstructed from a mangled source — the
    # swarming ids appear to be added only for no_buildbot runs; confirm
    # against the original file.
    properties.extend(['no_buildbot', 'True'])
    properties.extend(['swarming_bot_id', api.vars.swarming_bot_id])
    properties.extend(['swarming_task_id', api.vars.swarming_task_id])

  args = [
    'dm',
    '--undefok',   # This helps branches that may not know new flags.
    '--resourcePath', api.flavor.device_dirs.resource_dir,
    '--skps', api.flavor.device_dirs.skp_dir,
    '--images', api.flavor.device_path_join(
        api.flavor.device_dirs.images_dir, 'dm'),
    '--colorImages', api.flavor.device_path_join(
        api.flavor.device_dirs.images_dir, 'colorspace'),
    '--nameByHash',
    '--properties'
  ] + properties

  args.extend(['--svgs', api.flavor.device_dirs.svg_dir])

  args.append('--key')
  args.extend(key_params(api))
  if use_hash_file:
    args.extend(['--uninterestingHashesFile', hashes_file])
  if api.vars.upload_dm_results:
    args.extend(['--writePath', api.flavor.device_dirs.dm_dir])

  # Skip whichever backend (CPU or GPU) this bot is not configured to test.
  skip_flag = None
  if api.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
    skip_flag = '--nogpu'
  elif api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
    skip_flag = '--nocpu'
  if skip_flag:
    args.append(skip_flag)
  # Builder-specific configs, blacklists and test matches.
  args.extend(dm_flags(api.vars.builder_name))

  api.run(api.flavor.step, 'dm', cmd=args,
          abort_on_failure=False,
          env=api.vars.default_env)

  if api.vars.upload_dm_results:
    # Copy images and JSON to host machine if needed.
    api.flavor.copy_directory_contents_to_host(
        api.flavor.device_dirs.dm_dir, api.vars.dm_dir)

  # See skia:2789.
  if ('Valgrind' in api.vars.builder_name and
      api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
    # Re-run DM twice with GPU-context-abandonment flags; both runs reuse
    # the same argument list and are allowed to fail without aborting.
    abandonGpuContext = list(args)
    abandonGpuContext.append('--abandonGpuContext')
    api.run(api.flavor.step, 'dm --abandonGpuContext',
            cmd=abandonGpuContext, abort_on_failure=False)
    preAbandonGpuContext = list(args)
    preAbandonGpuContext.append('--preAbandonGpuContext')
    api.run(api.flavor.step, 'dm --preAbandonGpuContext',
            cmd=preAbandonGpuContext, abort_on_failure=False,
            env=api.vars.default_env)
|
||||
|
||||
|
||||
def RunSteps(api):
  """Entry point: set up the bot, install, run the DM tests, clean up.

  cleanup_steps always runs, even if install or the tests raise; after
  cleanup, check_failure() presumably surfaces any failures recorded by
  earlier best-effort steps — confirm in the run module.
  """
  api.core.setup()
  try:
    api.flavor.install_everything()
    test_steps(api)
  finally:
    api.flavor.cleanup_steps()
  api.run.check_failure()
|
||||
api.sktest.run()
|
||||
|
||||
|
||||
def GenTests(api):
|
||||
|
@ -7,96 +7,13 @@
|
||||
|
||||
|
||||
DEPS = [
|
||||
'build/file',
|
||||
'recipe_engine/json',
|
||||
'recipe_engine/path',
|
||||
'upload_dm_results',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/shutil',
|
||||
'recipe_engine/step',
|
||||
'recipe_engine/time',
|
||||
]
|
||||
|
||||
|
||||
import calendar
|
||||
|
||||
|
||||
# Filename of DM's JSON summary within the results directory.
DM_JSON = 'dm.json'
# Destination Google Storage bucket for DM results.
GS_BUCKET = 'gs://skia-infra-gm'
# Number of times cp() retries a gsutil upload before re-raising the failure.
UPLOAD_ATTEMPTS = 5
# Filename of DM's verbose log within the results directory.
VERBOSE_LOG = 'verbose.log'
|
||||
|
||||
|
||||
def cp(api, name, src, dst, extra_args=None):
  """Copy src to dst with `gsutil cp`, retrying up to UPLOAD_ATTEMPTS times.

  Args:
    api: recipe API object; api.step runs the command and
        api.step.StepFailure is raised when a step fails.
    name: short label; the step is shown as 'upload <name>'.
    src: source path/URL, passed straight to gsutil.
    dst: destination path/URL, passed straight to gsutil.
    extra_args: optional list of extra gsutil flags inserted before src/dst.

  Raises:
    api.step.StepFailure: if every attempt fails.
  """
  cmd = ['gsutil', 'cp']
  if extra_args:
    cmd.extend(extra_args)
  cmd.extend([src, dst])

  base_name = 'upload %s' % name
  for attempt in range(UPLOAD_ATTEMPTS):
    # Label retries so individual attempts are distinguishable in the logs.
    step_name = base_name if attempt == 0 else (
        '%s (attempt %d)' % (base_name, attempt + 1))
    try:
      api.step(step_name, cmd=cmd)
      break
    except api.step.StepFailure:
      # Out of retries: surface the last failure.
      if attempt == UPLOAD_ATTEMPTS - 1:
        raise
|
||||
|
||||
|
||||
def RunSteps(api):
  """Upload DM results: images plus dm.json and verbose.log to GS_BUCKET.

  Reads 'buildername' and 'revision' (and trybot issue/patchset) from the
  recipe properties; expects DM output in <start_dir>/dm.
  """
  builder_name = api.properties['buildername']
  revision = api.properties['revision']

  results_dir = api.path['start_dir'].join('dm')

  # Move dm.json and verbose.log to their own directory.
  json_file = results_dir.join(DM_JSON)
  log_file = results_dir.join(VERBOSE_LOG)
  tmp_dir = api.path['start_dir'].join('tmp_upload')
  api.shutil.makedirs('tmp dir', tmp_dir, infra_step=True)
  api.shutil.copy('copy dm.json', json_file, tmp_dir)
  api.shutil.copy('copy verbose.log', log_file, tmp_dir)
  api.shutil.remove('rm old dm.json', json_file)
  api.shutil.remove('rm old verbose.log', log_file)

  # Upload the images.
  image_dest_path = '/'.join((GS_BUCKET, 'dm-images-v1'))
  files_to_upload = api.file.glob(
    'find images',
    results_dir.join('*'),
    test_data=['someimage.png'],
    infra_step=True)
  if len(files_to_upload) > 0:
    cp(api, 'images', results_dir.join('*'), image_dest_path)

  # Upload the JSON summary and verbose.log.
  # Results are sharded by UTC year/month/day/hour, then revision/builder,
  # with a Unix-epoch timestamp as the final path component.
  now = api.time.utcnow()
  summary_dest_path = '/'.join([
      'dm-json-v1',
      str(now.year ).zfill(4),
      str(now.month).zfill(2),
      str(now.day ).zfill(2),
      str(now.hour ).zfill(2),
      revision,
      builder_name,
      str(int(calendar.timegm(now.utctimetuple())))])

  # Trybot results are further siloed by issue/patchset.
  issue = str(api.properties.get('issue', ''))
  patchset = str(api.properties.get('patchset', ''))
  if api.properties.get('patch_storage', '') == 'gerrit':
    # Gerrit jobs carry the change under different property names.
    issue = str(api.properties['patch_issue'])
    patchset = str(api.properties['patch_set'])
  if issue and patchset:
    summary_dest_path = '/'.join((
        'trybot', summary_dest_path, issue, patchset))

  summary_dest_path = '/'.join((GS_BUCKET, summary_dest_path))

  # -z gzip-compresses json/log files in transit/storage.
  cp(api, 'JSON and logs', tmp_dir.join('*'), summary_dest_path,
     ['-z', 'json,log'])
|
||||
api.upload_dm_results.run()
|
||||
|
||||
|
||||
def GenTests(api):
|
||||
|
@ -7,50 +7,13 @@
|
||||
|
||||
|
||||
DEPS = [
|
||||
'build/file',
|
||||
'recipe_engine/path',
|
||||
'recipe_engine/properties',
|
||||
'recipe_engine/step',
|
||||
'recipe_engine/time',
|
||||
'upload_nano_results',
|
||||
]
|
||||
|
||||
|
||||
def RunSteps(api):
  # Upload the nanobench results.
  """Upload the single nanobench/skpbench JSON result to gs://skia-perf.

  Reads 'buildername' (and trybot issue/patchset) from the recipe
  properties; expects exactly one JSON file under
  <start_dir>/perfdata/<builder>/data.
  """
  builder_name = api.properties['buildername']

  now = api.time.utcnow()
  src_path = api.path['start_dir'].join(
      'perfdata', builder_name, 'data')
  results = api.file.glob(
      'find results',
      '*.json',
      cwd=src_path,
      test_data=['nanobench_abc123.json'],
      infra_step=True)
  if len(results) != 1:  # pragma: nocover
    raise Exception('Unable to find nanobench or skpbench JSON file!')

  src = src_path.join(results[0])
  basename = api.path.basename(src)
  # Shard by UTC year/month/day/hour, then builder name.
  gs_path = '/'.join((
      'nano-json-v1', str(now.year).zfill(4),
      str(now.month).zfill(2), str(now.day).zfill(2), str(now.hour).zfill(2),
      builder_name))

  # Trybot results are further siloed by issue/patchset.
  issue = str(api.properties.get('issue', ''))
  patchset = str(api.properties.get('patchset', ''))
  if api.properties.get('patch_storage', '') == 'gerrit':
    # Gerrit jobs carry the change under different property names.
    issue = str(api.properties['patch_issue'])
    patchset = str(api.properties['patch_set'])
  if issue and patchset:
    gs_path = '/'.join(('trybot', gs_path, issue, patchset))

  dst = '/'.join(('gs://skia-perf', gs_path, basename))

  # -a public-read makes the object world-readable; -z gzips json in transit.
  api.step('upload',
           cmd=['gsutil', 'cp', '-a', 'public-read', '-z', 'json', src, dst],
           infra_step=True)
|
||||
api.upload_nano_results.run()
|
||||
|
||||
|
||||
def GenTests(api):
|
||||
|
Loading…
Reference in New Issue
Block a user