Delete unused upload_*_results.py

BUG=skia:5719
GOLD_TRYBOT_URL= https://gold.skia.org/search?issue=2371883004

Review-Url: https://codereview.chromium.org/2371883004
borenet 2016-09-27 07:51:15 -07:00 committed by Commit bot
parent 5c2310c86d
commit cefee07bf7
2 changed files with 0 additions and 166 deletions

@@ -1,68 +0,0 @@
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Upload benchmark performance data results. """
import gzip
import os
import os.path
import re
import subprocess
import sys
import tempfile
from datetime import datetime


def _UploadJSONResults(builder_name, build_number, dest_gsbase, gs_subdir,
full_json_path, gzipped=True, gsutil_path='gsutil',
issue_number=None):
now = datetime.utcnow()
gs_json_path = '/'.join((str(now.year).zfill(4), str(now.month).zfill(2),
str(now.day).zfill(2), str(now.hour).zfill(2)))
gs_dir = '/'.join((gs_subdir, gs_json_path, builder_name))
if builder_name.endswith('-Trybot'):
if not issue_number:
raise Exception('issue_number build property is missing!')
gs_dir = '/'.join(('trybot', gs_dir, build_number, issue_number))
full_path_to_upload = full_json_path
file_to_upload = os.path.basename(full_path_to_upload)
gzip_args = []
if gzipped:
gzip_args = ['-z', 'json']
cmd = ['python', gsutil_path, 'cp', '-a', 'public-read']
cmd.extend(gzip_args)
cmd.extend([full_path_to_upload,
'/'.join((dest_gsbase, gs_dir, file_to_upload))])
print ' '.join(cmd)
subprocess.check_call(cmd)


def main(builder_name, build_number, perf_data_dir, got_revision, gsutil_path,
issue_number=None):
"""Uploads gzipped nanobench JSON data."""
# Find the nanobench JSON
file_list = os.listdir(perf_data_dir)
RE_FILE_SEARCH = re.compile(
'nanobench_({})_[0-9]+\.json'.format(got_revision))
nanobench_name = None
for file_name in file_list:
if RE_FILE_SEARCH.search(file_name):
nanobench_name = file_name
break
if nanobench_name:
dest_gsbase = 'gs://skia-perf'
nanobench_json_file = os.path.join(perf_data_dir,
nanobench_name)
_UploadJSONResults(builder_name, build_number, dest_gsbase, 'nano-json-v1',
nanobench_json_file, gsutil_path=gsutil_path,
issue_number=issue_number)


if __name__ == '__main__':
main(*sys.argv[1:])
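
For reference, the destination this script computed was gs://skia-perf/nano-json-v1/YYYY/MM/DD/HH/<builder>/<json basename>, nested under trybot/ with the build and issue numbers for -Trybot builders, and the upload itself was a plain gsutil cp -a public-read (plus -z json when gzip compression was requested). Below is a minimal sketch of that path construction, not part of the deleted file; the helper name nanobench_dest and the builder, build number, and timestamp are made-up illustrations.

# Sketch only (not from the deleted script): reproduces the GS destination
# layout built by _UploadJSONResults above, with illustrative inputs.
from datetime import datetime

def nanobench_dest(builder_name, build_number, issue_number=None,
                   dest_gsbase='gs://skia-perf', gs_subdir='nano-json-v1',
                   now=None):
  now = now or datetime.utcnow()
  gs_json_path = '/'.join((str(now.year).zfill(4), str(now.month).zfill(2),
                           str(now.day).zfill(2), str(now.hour).zfill(2)))
  gs_dir = '/'.join((gs_subdir, gs_json_path, builder_name))
  if builder_name.endswith('-Trybot'):
    gs_dir = '/'.join(('trybot', gs_dir, build_number, issue_number))
  # The basename of the nanobench JSON file is appended to this prefix.
  return '/'.join((dest_gsbase, gs_dir))

# gs://skia-perf/trybot/nano-json-v1/2016/09/27/14/Perf-Example-Trybot/42/2371883004
print(nanobench_dest('Perf-Example-Trybot', '42', issue_number='2371883004',
                     now=datetime(2016, 9, 27, 14)))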

@@ -1,98 +0,0 @@
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Upload DM output PNG files and JSON summary to Google Storage."""
import datetime
import json
import os
import shutil
import sys
import tempfile


def main(dm_dir, git_hash, builder_name, build_number, try_issue, import_path):
"""Upload DM output PNG files and JSON summary to Google Storage.
dm_dir: path to PNG files and JSON summary (str)
git_hash: this build's Git hash (str)
builder_name: name of this builder (str)
build_number: nth build on this builder (str or int)
try_issue: Rietveld issue if this is a try job (str, int, or None)
import_path: Path to import the gs_utils package (str)
"""
# import gs_utils
sys.path.insert(0, import_path)
import gs_utils
# Private, but Google-readable.
ACL = gs_utils.GSUtils.PredefinedACL.PRIVATE
FINE_ACLS = [(
gs_utils.GSUtils.IdType.GROUP_BY_DOMAIN,
'google.com',
gs_utils.GSUtils.Permission.READ
)]
# Move dm.json and verbose.log to their own directory for easy upload.
tmp = tempfile.mkdtemp()
shutil.move(os.path.join(dm_dir, 'dm.json'),
os.path.join(tmp, 'dm.json'))
shutil.move(os.path.join(dm_dir, 'verbose.log'),
os.path.join(tmp, 'verbose.log'))
# Make sure the JSON file parses correctly.
json_file_name = os.path.join(tmp, 'dm.json')
with open(json_file_name) as jsonFile:
try:
json.load(jsonFile)
except ValueError:
json_content = open(json_file_name).read()
print >> sys.stderr, "Invalid JSON: \n\n%s\n" % json_content
raise
# Only images are left in dm_dir. Upload any new ones.
gs = gs_utils.GSUtils()
bucket, image_dest_dir = 'chromium-skia-gm', 'dm-images-v1'
print 'Uploading images to gs://' + bucket + '/' + image_dest_dir
gs.upload_dir_contents(dm_dir,
bucket,
image_dest_dir,
upload_if = gs.UploadIf.ALWAYS,
predefined_acl = ACL,
fine_grained_acl_list = FINE_ACLS)
# /dm-json-v1/year/month/day/hour/git-hash/builder/build-number/dm.json
now = datetime.datetime.utcnow()
summary_dest_dir = '/'.join(['dm-json-v1',
str(now.year ).zfill(4),
str(now.month).zfill(2),
str(now.day ).zfill(2),
str(now.hour ).zfill(2),
git_hash,
builder_name,
str(build_number)])
# Trybot results are further siloed by CL.
if try_issue:
summary_dest_dir = '/'.join(['trybot', summary_dest_dir, str(try_issue)])
# Upload the JSON summary and verbose.log.
print 'Uploading logs to gs://' + bucket + '/' + summary_dest_dir
gs.upload_dir_contents(tmp,
bucket,
summary_dest_dir,
predefined_acl = ACL,
fine_grained_acl_list = FINE_ACLS)
# Just for hygiene, put dm.json and verbose.log back.
shutil.move(os.path.join(tmp, 'dm.json'),
os.path.join(dm_dir, 'dm.json'))
shutil.move(os.path.join(tmp, 'verbose.log'),
os.path.join(dm_dir, 'verbose.log'))
os.rmdir(tmp)


if '__main__' == __name__:
main(*sys.argv[1:])
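
The summary destination here follows the layout in the comment above: dm-json-v1/year/month/day/hour/git-hash/builder/build-number inside the chromium-skia-gm bucket, with try-job results further nested as trybot/.../<issue>; the PNGs go to dm-images-v1 in the same bucket, private but google.com-readable. Below is a minimal sketch of where dm.json would land, not part of the deleted file; the helper name dm_summary_dest and the builder, build number, and timestamp are made-up illustrations.

# Sketch only (not from the deleted script): reproduces the dm.json
# destination layout built by main() above, with illustrative inputs.
import datetime

def dm_summary_dest(git_hash, builder_name, build_number, try_issue=None,
                    now=None):
  now = now or datetime.datetime.utcnow()
  dest = '/'.join(['dm-json-v1',
                   str(now.year ).zfill(4),
                   str(now.month).zfill(2),
                   str(now.day  ).zfill(2),
                   str(now.hour ).zfill(2),
                   git_hash,
                   builder_name,
                   str(build_number)])
  if try_issue:
    dest = '/'.join(['trybot', dest, str(try_issue)])
  return 'gs://chromium-skia-gm/' + dest + '/dm.json'

# gs://chromium-skia-gm/trybot/dm-json-v1/2016/09/27/14/cefee07bf7/Test-Example-Trybot/42/2371883004/dm.json
print(dm_summary_dest('cefee07bf7', 'Test-Example-Trybot', 42,
                      try_issue=2371883004,
                      now=datetime.datetime(2016, 9, 27, 14)))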