be2062c430
Changed the python script to operate on all shaders in the directory, handling SPIR-V appropriately, and collating data across backends. Made the output CSV, so that it imports into spreadsheets directly (and handles missing columns cleanly). Removed all the JSON digest logic - this was overkill at the moment, and it made it tricky once we were combining information from GL and Vulkan. Also, the hit count data was probably misleading - it didn't count hits at the program cache level, just the persistent cache. Change-Id: If354cde943c96f84e7bcc20a137afefca3b59358 Reviewed-on: https://skia-review.googlesource.com/c/skia/+/207960 Commit-Queue: Brian Osman <brianosman@google.com> Reviewed-by: Greg Daniel <egdaniel@google.com>
41 lines
1.1 KiB
Python
41 lines
1.1 KiB
Python
# Copyright 2019 The Chromium Authors. All rights reserved.
|
|
# Use of this source code is governed by a BSD-style license that can be
|
|
# found in the LICENSE file.
|
|
|
|
import json
|
|
import os
|
|
import subprocess
|
|
import sys
|
|
|
|
def _shader_stats(compiler, folder):
    """Run `compiler` on every .frag/.spv shader in `folder`.

    Returns a dict mapping shader basename -> {extension: [instruction
    counts]}, where the counts are the whitespace-separated fields after
    the compiler's 'Instructions Emitted:' line. Shaders the compiler
    rejects are skipped silently (best-effort collation across backends).
    """
    stats = {}
    for filename in os.listdir(folder):
        basename, ext = os.path.splitext(filename)
        if ext not in ('.frag', '.spv'):
            continue
        cmdline = [compiler]
        if ext == '.spv':
            # Extra flags for SPIR-V input — presumably the compiler's
            # binary-input/entry-point switches; TODO confirm against the
            # compiler's help text.
            cmdline.extend(['-f', '-p'])
        cmdline.append(os.path.join(folder, filename))
        try:
            # Decode so the text scan below works on str under Python 3
            # (check_output returns bytes there).
            output = subprocess.check_output(cmdline).decode('utf-8', 'replace')
        except subprocess.CalledProcessError:
            continue  # compiler rejected this shader; skip it
        # Register the shader even if no stats line is found so it still
        # gets a (blank-columned) CSV row.
        stats.setdefault(basename, {})
        for line in output.splitlines():
            if line.startswith('Instructions Emitted'):
                # Fields after the colon, e.g. "ALU Tex Flow" counts.
                # Assumes exactly three fields — TODO confirm with the
                # compiler's output format.
                inst = line.split(':')[1].split()
                stats[basename][ext] = inst
    return stats


def main(argv):
    """Collate per-shader stats and print them as CSV; return exit code."""
    if len(argv) != 3:
        print(argv[0], ' <compiler> <folder>')
        return 1
    stats = _shader_stats(argv[1], argv[2])
    for name, per_ext in stats.items():
        # Missing backends yield empty columns so the CSV imports into a
        # spreadsheet with aligned columns.
        gl = per_ext.get('.frag', ['', '', ''])
        vk = per_ext.get('.spv', ['', '', ''])
        print('{0},{1},{2},{3},{4},{5},{6}'.format(
            name, gl[0], gl[1], gl[2], vk[0], vk[1], vk[2]))
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv))
|