[perf_skottiewasm_lottieweb] Use the new "--use_gpu" flag for GPU bots
* Also use DISPLAY=:0 for non-headless chrome via puppeteer.
* Output gpu-gl-renderer, gpu-driver, gpu-gl-vendor from the trace file. Will be useful for debugging.

NoTry: true
Bug: skia:9237
Change-Id: Ieb70dbe540aeca89e88dbaeace3cdba1b50eb7ef
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/227156
Reviewed-by: Florin Malita <fmalita@chromium.org>
Commit-Queue: Ravi Mistry <rmistry@google.com>
parent 4c5edc29fa
commit 690e899cff
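For context, the GPU-metadata output added to the inline trace-parsing script boils down to the following standalone sketch. The wrapper file and the `trace.json` argument are illustrative only (not part of this CL), and the sketch uses Python 3 prints, whereas the recipe's inline script is written against Python 2:

import json
import sys

# Hypothetical usage: python print_gpu_metadata.py trace.json
trace_path = sys.argv[1]
with open(trace_path) as f:
    trace_json = json.load(f)

# Chrome writes GPU details into the trace file's 'metadata' dict;
# dict.get() returns None if a key is absent, so missing fields do not raise.
print('GPU data:')
print(trace_json['metadata'].get('gpu-gl-renderer'))
print(trace_json['metadata'].get('gpu-driver'))
print(trace_json['metadata'].get('gpu-gl-vendor'))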
@@ -138,6 +138,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/lottie-web-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -147,7 +148,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie1.json",
"/path/to/tmp/json",
"lottie-web"
@@ -174,6 +175,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@@ -258,6 +265,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/lottie-web-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -267,7 +275,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie2.json",
"/path/to/tmp/json",
"lottie-web"
@@ -294,6 +302,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@@ -378,6 +392,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/lottie-web-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -387,7 +402,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie3.json",
"/path/to/tmp/json",
"lottie-web"
@@ -414,6 +429,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",

@@ -140,6 +140,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/lottie-web-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -149,7 +150,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie1.json",
"/path/to/tmp/json",
"lottie-web"
@@ -176,6 +177,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@@ -260,6 +267,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/lottie-web-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -269,7 +277,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie2.json",
"/path/to/tmp/json",
"lottie-web"
@@ -296,6 +304,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@@ -380,6 +394,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/lottie-web-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -389,7 +404,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie3.json",
"/path/to/tmp/json",
"lottie-web"
@@ -416,6 +431,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",

@@ -142,6 +142,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -151,7 +152,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie1.json",
"/path/to/tmp/json",
"skottie-wasm"
@@ -178,6 +179,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@@ -266,6 +273,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -275,7 +283,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
|
||||
"[CLEANUP]/g3_try_tmp_1/lottie2.json",
|
||||
"/path/to/tmp/json",
|
||||
"skottie-wasm"
|
||||
@ -302,6 +310,12 @@
|
||||
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@ -390,6 +404,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@ -399,7 +414,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie3.json",
"/path/to/tmp/json",
"skottie-wasm"
@ -426,6 +441,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@ -0,0 +1,600 @@
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[recipe_engine::file]/resources/fileutil.py",
"--json-output",
"/path/to/tmp/json",
"ensure-directory",
"--mode",
"0777",
"[START_DIR]/cache/work"
],
"infra_step": true,
"name": "makedirs checkout_path"
},
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[recipe_engine::file]/resources/fileutil.py",
"--json-output",
"/path/to/tmp/json",
"remove",
"[START_DIR]/cache/work/.gclient_entries"
],
"infra_step": true,
"name": "remove [START_DIR]/cache/work/.gclient_entries"
},
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[depot_tools::bot_update]/resources/bot_update.py",
"--spec-path",
"cache_dir = '[START_DIR]/cache/git'\nsolutions = [{'deps_file': '.DEPS.git', 'managed': False, 'name': 'skia', 'url': 'https://skia.googlesource.com/skia.git'}]",
"--patch_root",
"skia",
"--revision_mapping_file",
"{\"got_revision\": \"skia\"}",
"--git-cache-dir",
"[START_DIR]/cache/git",
"--cleanup-dir",
"[CLEANUP]/bot_update",
"--output_json",
"/path/to/tmp/json",
"--revision",
"skia@abc123"
],
"cwd": "[START_DIR]/cache/work",
|
||||
"env_prefixes": {
|
||||
"PATH": [
|
||||
"RECIPE_REPO[depot_tools]"
|
||||
]
|
||||
},
|
||||
"infra_step": true,
|
||||
"name": "bot_update",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_TEXT@Some step text@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@{@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"skia\": \"abc123\"@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"manifest\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"skia\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"repository\": \"https://fake.org/skia.git\", @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"revision\": \"9046e2e693bb92a76e972b694580e5d17ad10748\"@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"skia\", @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"got_revision\": \"9046e2e693bb92a76e972b694580e5d17ad10748\", @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"got_revision_cp\": \"refs/heads/master@{#164710}\"@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"root\": \"skia\", @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"source_manifest\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"directories\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"skia\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"git_checkout\": {@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"repo_url\": \"https://fake.org/skia.git\", @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"revision\": \"9046e2e693bb92a76e972b694580e5d17ad10748\"@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"version\": 0@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ }, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@}@@@",
|
||||
"@@@STEP_LOG_END@json.output@@@",
|
||||
"@@@SET_BUILD_PROPERTY@got_revision@\"9046e2e693bb92a76e972b694580e5d17ad10748\"@@@",
|
||||
"@@@SET_BUILD_PROPERTY@got_revision_cp@\"refs/heads/master@{#164710}\"@@@"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"RECIPE_MODULE[recipe_engine::file]/resources/fileutil.py",
|
||||
"--json-output",
|
||||
"/path/to/tmp/json",
|
||||
"listdir",
|
||||
"[START_DIR]/lottie-samples"
|
||||
],
|
||||
"infra_step": true,
|
||||
"name": "list lottie files",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_LOG_LINE@listdir@[START_DIR]/lottie-samples/LICENSE@@@",
|
||||
"@@@STEP_LOG_LINE@listdir@[START_DIR]/lottie-samples/lottie1.json@@@",
|
||||
"@@@STEP_LOG_LINE@listdir@[START_DIR]/lottie-samples/lottie2.json@@@",
|
||||
"@@@STEP_LOG_LINE@listdir@[START_DIR]/lottie-samples/lottie3.json@@@",
|
||||
"@@@STEP_LOG_END@listdir@@@"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"npm",
|
||||
"install"
|
||||
],
|
||||
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
|
||||
"env_prefixes": {
|
||||
"PATH": [
|
||||
"[START_DIR]/node/node/bin"
|
||||
]
|
||||
},
|
||||
"name": "npm install"
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"[START_DIR]/node/node/bin/node",
|
||||
"[START_DIR]/cache/work/skia/tools/skottie-wasm-perf/skottie-wasm-perf.js",
|
||||
"--canvaskit_js",
|
||||
"[START_DIR]/build/canvaskit.js",
|
||||
"--canvaskit_wasm",
|
||||
"[START_DIR]/build/canvaskit.wasm",
|
||||
"--use_gpu",
|
||||
"--input",
|
||||
"[START_DIR]/lottie-samples/lottie1.json",
|
||||
"--output",
|
||||
"[CLEANUP]/g3_try_tmp_1/lottie1.json"
|
||||
],
|
||||
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
|
||||
"env": {
|
||||
"CHROME_HEADLESS": "1",
|
||||
"DISPLAY": ":0",
|
||||
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
|
||||
},
|
||||
"infra_step": true,
|
||||
"name": "Run perf cmd line app"
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
|
||||
"[CLEANUP]/g3_try_tmp_1/lottie1.json",
|
||||
"/path/to/tmp/json",
|
||||
"skottie-wasm"
|
||||
],
|
||||
"env": {
|
||||
"CHROME_HEADLESS": "1",
|
||||
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
|
||||
},
|
||||
"name": "parse lottie1.json trace",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_LOG_LINE@json.output@{@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_avg_us\": 179.71, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_max_us\": 218.25, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_min_us\": 141.17@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@}@@@",
|
||||
"@@@STEP_LOG_END@json.output@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@import json@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@import sys@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@trace_output = sys.argv[1]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@with open(trace_output, 'r') as f:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ trace_json = json.load(f)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@accepted_termination_statuses = [@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'missed_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'submitted_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'main_frame_aborted'@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@current_frame_duration = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@total_frames = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_id_to_start_ts = {}@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Will contain tuples of frame_ids and their duration.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@completed_frame_id_and_duration = []@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@for trace in trace_json['traceEvents']:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if 'PipelineReporter' in trace['name']:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id = trace['id']@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ args = trace.get('args')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts[frame_id] = trace['ts']@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ elif args and (args.get('termination_status') in@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ accepted_termination_statuses):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if not frame_id_to_start_ts.get(frame_id):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '[No start ts found for %s]' % frame_id@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ continue@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_frames += 1@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ completed_frame_id_and_duration.append(@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ (frame_id, current_frame_duration))@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ # We are done with this frame_id so remove it from the dict.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts.pop(frame_id)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '%d (%s with %s): %d' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_frames, frame_id, args['termination_status'],@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ current_frame_duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ elif args and (args.get('termination_status') in@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ erroneous_termination_statuses):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ # Invalidate previously collected results for this frame_id.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if frame_id_to_start_ts.get(frame_id):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '[Invalidating %s due to %s]' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id, args['termination_status'])@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts.pop(frame_id)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@total_completed_frames = len(completed_frame_id_and_duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@if total_completed_frames < 25:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ raise Exception('Even with 2 loops found only %d frames' %@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_completed_frames)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Get frame avg/min/max for the middle 25 frames.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@start = (total_completed_frames - 25)/2@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'Got %d total completed frames. Using start_index of %d.' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_completed_frames, start)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_max = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_min = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_cumulative = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@for frame_id, duration in completed_frame_id_and_duration[start:start+25]:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_max = max(frame_max, duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_min = min(frame_min, duration) if frame_min else duration@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_cumulative += duration@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results = {}@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_max_us'] = frame_max@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_min_us'] = frame_min@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_avg_us'] = frame_cumulative/25@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'For 25 frames got: %s' % perf_results@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Write perf_results to the output json.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@with open(output_json_file, 'w') as f:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ f.write(json.dumps(perf_results))@@@",
|
||||
"@@@STEP_LOG_END@python.inline@@@"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"[START_DIR]/node/node/bin/node",
|
||||
"[START_DIR]/cache/work/skia/tools/skottie-wasm-perf/skottie-wasm-perf.js",
|
||||
"--canvaskit_js",
|
||||
"[START_DIR]/build/canvaskit.js",
|
||||
"--canvaskit_wasm",
|
||||
"[START_DIR]/build/canvaskit.wasm",
|
||||
"--use_gpu",
|
||||
"--input",
|
||||
"[START_DIR]/lottie-samples/lottie2.json",
|
||||
"--output",
|
||||
"[CLEANUP]/g3_try_tmp_1/lottie2.json"
|
||||
],
|
||||
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
|
||||
"env": {
|
||||
"CHROME_HEADLESS": "1",
|
||||
"DISPLAY": ":0",
|
||||
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
|
||||
},
|
||||
"infra_step": true,
|
||||
"name": "Run perf cmd line app (2)"
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
|
||||
"[CLEANUP]/g3_try_tmp_1/lottie2.json",
|
||||
"/path/to/tmp/json",
|
||||
"skottie-wasm"
|
||||
],
|
||||
"env": {
|
||||
"CHROME_HEADLESS": "1",
|
||||
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
|
||||
},
|
||||
"name": "parse lottie2.json trace",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_LOG_LINE@json.output@{@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_avg_us\": 179.71, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_max_us\": 218.25, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_min_us\": 141.17@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@}@@@",
|
||||
"@@@STEP_LOG_END@json.output@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@import json@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@import sys@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@trace_output = sys.argv[1]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@with open(trace_output, 'r') as f:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ trace_json = json.load(f)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@accepted_termination_statuses = [@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'missed_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'submitted_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'main_frame_aborted'@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@current_frame_duration = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@total_frames = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_id_to_start_ts = {}@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Will contain tuples of frame_ids and their duration.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@completed_frame_id_and_duration = []@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@for trace in trace_json['traceEvents']:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if 'PipelineReporter' in trace['name']:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id = trace['id']@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ args = trace.get('args')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts[frame_id] = trace['ts']@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ elif args and (args.get('termination_status') in@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ accepted_termination_statuses):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if not frame_id_to_start_ts.get(frame_id):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '[No start ts found for %s]' % frame_id@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ continue@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_frames += 1@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ completed_frame_id_and_duration.append(@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ (frame_id, current_frame_duration))@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ # We are done with this frame_id so remove it from the dict.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts.pop(frame_id)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '%d (%s with %s): %d' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_frames, frame_id, args['termination_status'],@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ current_frame_duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ elif args and (args.get('termination_status') in@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ erroneous_termination_statuses):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ # Invalidate previously collected results for this frame_id.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if frame_id_to_start_ts.get(frame_id):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '[Invalidating %s due to %s]' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id, args['termination_status'])@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts.pop(frame_id)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@total_completed_frames = len(completed_frame_id_and_duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@if total_completed_frames < 25:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ raise Exception('Even with 2 loops found only %d frames' %@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_completed_frames)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Get frame avg/min/max for the middle 25 frames.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@start = (total_completed_frames - 25)/2@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'Got %d total completed frames. Using start_index of %d.' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_completed_frames, start)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_max = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_min = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_cumulative = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@for frame_id, duration in completed_frame_id_and_duration[start:start+25]:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_max = max(frame_max, duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_min = min(frame_min, duration) if frame_min else duration@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_cumulative += duration@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results = {}@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_max_us'] = frame_max@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_min_us'] = frame_min@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_avg_us'] = frame_cumulative/25@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'For 25 frames got: %s' % perf_results@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Write perf_results to the output json.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@with open(output_json_file, 'w') as f:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ f.write(json.dumps(perf_results))@@@",
|
||||
"@@@STEP_LOG_END@python.inline@@@"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"[START_DIR]/node/node/bin/node",
|
||||
"[START_DIR]/cache/work/skia/tools/skottie-wasm-perf/skottie-wasm-perf.js",
|
||||
"--canvaskit_js",
|
||||
"[START_DIR]/build/canvaskit.js",
|
||||
"--canvaskit_wasm",
|
||||
"[START_DIR]/build/canvaskit.wasm",
|
||||
"--use_gpu",
|
||||
"--input",
|
||||
"[START_DIR]/lottie-samples/lottie3.json",
|
||||
"--output",
|
||||
"[CLEANUP]/g3_try_tmp_1/lottie3.json"
|
||||
],
|
||||
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
|
||||
"env": {
|
||||
"CHROME_HEADLESS": "1",
|
||||
"DISPLAY": ":0",
|
||||
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
|
||||
},
|
||||
"infra_step": true,
|
||||
"name": "Run perf cmd line app (3)"
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
|
||||
"[CLEANUP]/g3_try_tmp_1/lottie3.json",
|
||||
"/path/to/tmp/json",
|
||||
"skottie-wasm"
|
||||
],
|
||||
"env": {
|
||||
"CHROME_HEADLESS": "1",
|
||||
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
|
||||
},
|
||||
"name": "parse lottie3.json trace",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_LOG_LINE@json.output@{@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_avg_us\": 179.71, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_max_us\": 218.25, @@@",
|
||||
"@@@STEP_LOG_LINE@json.output@ \"frame_min_us\": 141.17@@@",
|
||||
"@@@STEP_LOG_LINE@json.output@}@@@",
|
||||
"@@@STEP_LOG_END@json.output@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@import json@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@import sys@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@trace_output = sys.argv[1]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@with open(trace_output, 'r') as f:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ trace_json = json.load(f)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@accepted_termination_statuses = [@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'missed_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'submitted_frame',@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ 'main_frame_aborted'@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@current_frame_duration = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@total_frames = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_id_to_start_ts = {}@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Will contain tuples of frame_ids and their duration.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@completed_frame_id_and_duration = []@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@for trace in trace_json['traceEvents']:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if 'PipelineReporter' in trace['name']:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id = trace['id']@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ args = trace.get('args')@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts[frame_id] = trace['ts']@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ elif args and (args.get('termination_status') in@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ accepted_termination_statuses):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if not frame_id_to_start_ts.get(frame_id):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '[No start ts found for %s]' % frame_id@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ continue@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_frames += 1@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ completed_frame_id_and_duration.append(@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ (frame_id, current_frame_duration))@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ # We are done with this frame_id so remove it from the dict.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts.pop(frame_id)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '%d (%s with %s): %d' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_frames, frame_id, args['termination_status'],@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ current_frame_duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ elif args and (args.get('termination_status') in@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ erroneous_termination_statuses):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ # Invalidate previously collected results for this frame_id.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ if frame_id_to_start_ts.get(frame_id):@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ print '[Invalidating %s due to %s]' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id, args['termination_status'])@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_id_to_start_ts.pop(frame_id)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@total_completed_frames = len(completed_frame_id_and_duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@if total_completed_frames < 25:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ raise Exception('Even with 2 loops found only %d frames' %@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_completed_frames)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Get frame avg/min/max for the middle 25 frames.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@start = (total_completed_frames - 25)/2@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'Got %d total completed frames. Using start_index of %d.' % (@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ total_completed_frames, start)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_max = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_min = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@frame_cumulative = 0@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@for frame_id, duration in completed_frame_id_and_duration[start:start+25]:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_max = max(frame_max, duration)@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_min = min(frame_min, duration) if frame_min else duration@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ frame_cumulative += duration@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results = {}@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_max_us'] = frame_max@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_min_us'] = frame_min@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@perf_results['frame_avg_us'] = frame_cumulative/25@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print 'For 25 frames got: %s' % perf_results@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@# Write perf_results to the output json.@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@with open(output_json_file, 'w') as f:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ f.write(json.dumps(perf_results))@@@",
|
||||
"@@@STEP_LOG_END@python.inline@@@"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"RECIPE_MODULE[recipe_engine::file]/resources/fileutil.py",
|
||||
"--json-output",
|
||||
"/path/to/tmp/json",
|
||||
"rmtree",
|
||||
"[CLEANUP]/g3_try_tmp_1"
|
||||
],
|
||||
"infra_step": true,
|
||||
"name": "rmtree [CLEANUP]/g3_try_tmp_1"
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"import os\nprint os.environ.get('SWARMING_BOT_ID', '')\n"
|
||||
],
|
||||
"name": "get swarming bot id",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_LOG_LINE@python.inline@import os@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print os.environ.get('SWARMING_BOT_ID', '')@@@",
|
||||
"@@@STEP_LOG_END@python.inline@@@"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"import os\nprint os.environ.get('SWARMING_TASK_ID', '')\n"
|
||||
],
|
||||
"name": "get swarming task id",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_LOG_LINE@python.inline@import os@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@print os.environ.get('SWARMING_TASK_ID', '')@@@",
|
||||
"@@@STEP_LOG_END@python.inline@@@"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"RECIPE_MODULE[recipe_engine::file]/resources/fileutil.py",
|
||||
"--json-output",
|
||||
"/path/to/tmp/json",
|
||||
"ensure-directory",
|
||||
"--mode",
|
||||
"0777",
|
||||
"[START_DIR]/[SWARM_OUT_DIR]"
|
||||
],
|
||||
"infra_step": true,
|
||||
"name": "makedirs perf_dir"
|
||||
},
|
||||
{
|
||||
"cmd": [
|
||||
"python",
|
||||
"-u",
|
||||
"import json\nwith open('[START_DIR]/[SWARM_OUT_DIR]/perf_abc123_1337000001.json', 'w') as outfile:\n json.dump(obj={'gitHash': 'abc123', 'results': {'lottie3.json': {'gl': {'frame_avg_us': 179.71, 'frame_max_us': 218.25, 'frame_min_us': 141.17}}, 'lottie1.json': {'gl': {'frame_avg_us': 179.71, 'frame_max_us': 218.25, 'frame_min_us': 141.17}}, 'lottie2.json': {'gl': {'frame_avg_us': 179.71, 'frame_max_us': 218.25, 'frame_min_us': 141.17}}}, 'swarming_task_id': '', 'renderer': 'skottie-wasm', 'key': {'extra_config': 'SkottieWASM', 'bench_type': 'tracing', 'cpu_or_gpu_value': 'IntelIris640', 'arch': 'wasm', 'source_type': 'skottie', 'cpu_or_gpu': 'GPU', 'model': 'NUC7i5BNK', 'configuration': 'Release', 'os': 'Debian9', 'compiler': 'EMCC'}, 'swarming_bot_id': ''}, fp=outfile, indent=4)\n"
|
||||
],
|
||||
"env": {
|
||||
"CHROME_HEADLESS": "1",
|
||||
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
|
||||
},
|
||||
"name": "write output JSON",
|
||||
"~followup_annotations": [
|
||||
"@@@STEP_LOG_LINE@python.inline@import json@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@with open('[START_DIR]/[SWARM_OUT_DIR]/perf_abc123_1337000001.json', 'w') as outfile:@@@",
|
||||
"@@@STEP_LOG_LINE@python.inline@ json.dump(obj={'gitHash': 'abc123', 'results': {'lottie3.json': {'gl': {'frame_avg_us': 179.71, 'frame_max_us': 218.25, 'frame_min_us': 141.17}}, 'lottie1.json': {'gl': {'frame_avg_us': 179.71, 'frame_max_us': 218.25, 'frame_min_us': 141.17}}, 'lottie2.json': {'gl': {'frame_avg_us': 179.71, 'frame_max_us': 218.25, 'frame_min_us': 141.17}}}, 'swarming_task_id': '', 'renderer': 'skottie-wasm', 'key': {'extra_config': 'SkottieWASM', 'bench_type': 'tracing', 'cpu_or_gpu_value': 'IntelIris640', 'arch': 'wasm', 'source_type': 'skottie', 'cpu_or_gpu': 'GPU', 'model': 'NUC7i5BNK', 'configuration': 'Release', 'os': 'Debian9', 'compiler': 'EMCC'}, 'swarming_bot_id': ''}, fp=outfile, indent=4)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
},
{
"name": "$result"
}
]
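Note for readers: every escaped one-line Python string in the expectations above (and the STEP_LOG_LINE annotations that mirror it) embeds the same inline trace-parsing script. As a readability aid, the sketch below restates that logic as a standalone Python 3 module. It is an illustrative rendition under the assumption of the same Chrome trace format, not the recipe's literal Python 2 script; the function name parse_trace is invented here.

import json
import sys

ACCEPTED = ('missed_frame', 'submitted_frame', 'main_frame_aborted')
ERRONEOUS = ('replaced_by_new_reporter_at_same_stage', 'did_not_produce_frame')

def parse_trace(trace_path, output_path):
    # Load the Chrome trace written by the "Run perf cmd line app" step.
    with open(trace_path) as f:
        trace_json = json.load(f)

    # GPU metadata that the updated script now prints for debugging.
    meta = trace_json.get('metadata', {})
    print('GPU data:', meta.get('gpu-gl-renderer'), meta.get('gpu-driver'),
          meta.get('gpu-gl-vendor'))

    starts = {}     # frame_id -> timestamp of BeginImplFrameToSendBeginMainFrame
    completed = []  # (frame_id, duration in microseconds)
    for event in trace_json['traceEvents']:
        if 'PipelineReporter' not in event['name']:
            continue
        frame_id = event['id']
        args = event.get('args') or {}
        if args.get('step') == 'BeginImplFrameToSendBeginMainFrame':
            starts[frame_id] = event['ts']
        elif args.get('termination_status') in ACCEPTED and frame_id in starts:
            completed.append((frame_id, event['ts'] - starts.pop(frame_id)))
        elif args.get('termination_status') in ERRONEOUS:
            # Invalidate any partial measurement for this frame.
            starts.pop(frame_id, None)

    if len(completed) < 25:
        raise Exception('Even with 2 loops found only %d frames' % len(completed))

    # Summarize the middle 25 frames to skip startup/teardown noise.
    begin = (len(completed) - 25) // 2
    durations = [d for _, d in completed[begin:begin + 25]]
    perf_results = {
        'frame_max_us': max(durations),
        'frame_min_us': min(durations),
        'frame_avg_us': sum(durations) / 25.0,
    }
    with open(output_path, 'w') as f:
        f.write(json.dumps(perf_results))
    return perf_results

if __name__ == '__main__':
    parse_trace(sys.argv[1], sys.argv[2])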
@ -144,6 +144,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@ -153,7 +154,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie1.json",
"/path/to/tmp/json",
"skottie-wasm"
@ -180,6 +181,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@ -268,6 +275,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@ -277,7 +285,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie2.json",
"/path/to/tmp/json",
"skottie-wasm"
@@ -304,6 +312,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@@ -392,6 +406,7 @@
"cwd": "[START_DIR]/cache/work/skia/tools/skottie-wasm-perf",
"env": {
"CHROME_HEADLESS": "1",
"DISPLAY": ":0",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
@@ -401,7 +416,7 @@
"cmd": [
"python",
"-u",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"\nimport json\nimport sys\n\ntrace_output = sys.argv[1]\nwith open(trace_output, 'r') as f:\n trace_json = json.load(f)\noutput_json_file = sys.argv[2]\nrenderer = sys.argv[3] # Unused for now but might be useful in the future.\n\n# Output data about the GPU that was used.\nprint 'GPU data:'\nprint trace_json['metadata'].get('gpu-gl-renderer')\nprint trace_json['metadata'].get('gpu-driver')\nprint trace_json['metadata'].get('gpu-gl-vendor')\n\nerroneous_termination_statuses = [\n 'replaced_by_new_reporter_at_same_stage',\n 'did_not_produce_frame',\n]\naccepted_termination_statuses = [\n 'missed_frame',\n 'submitted_frame',\n 'main_frame_aborted'\n]\n\ncurrent_frame_duration = 0\ntotal_frames = 0\nframe_id_to_start_ts = {}\n# Will contain tuples of frame_ids and their duration.\ncompleted_frame_id_and_duration = []\nfor trace in trace_json['traceEvents']:\n if 'PipelineReporter' in trace['name']:\n frame_id = trace['id']\n args = trace.get('args')\n if args and args.get('step') == 'BeginImplFrameToSendBeginMainFrame':\n frame_id_to_start_ts[frame_id] = trace['ts']\n elif args and (args.get('termination_status') in\n accepted_termination_statuses):\n if not frame_id_to_start_ts.get(frame_id):\n print '[No start ts found for %s]' % frame_id\n continue\n current_frame_duration = trace['ts'] - frame_id_to_start_ts[frame_id]\n total_frames += 1\n completed_frame_id_and_duration.append(\n (frame_id, current_frame_duration))\n # We are done with this frame_id so remove it from the dict.\n frame_id_to_start_ts.pop(frame_id)\n print '%d (%s with %s): %d' % (\n total_frames, frame_id, args['termination_status'],\n current_frame_duration)\n elif args and (args.get('termination_status') in\n erroneous_termination_statuses):\n # Invalidate previously collected results for this frame_id.\n if frame_id_to_start_ts.get(frame_id):\n print '[Invalidating %s due to %s]' % (\n frame_id, args['termination_status'])\n frame_id_to_start_ts.pop(frame_id)\n\ntotal_completed_frames = len(completed_frame_id_and_duration)\nif total_completed_frames < 25:\n raise Exception('Even with 2 loops found only %d frames' %\n total_completed_frames)\n\n# Get frame avg/min/max for the middle 25 frames.\nstart = (total_completed_frames - 25)/2\nprint 'Got %d total completed frames. Using start_index of %d.' % (\n total_completed_frames, start)\nframe_max = 0\nframe_min = 0\nframe_cumulative = 0\nfor frame_id, duration in completed_frame_id_and_duration[start:start+25]:\n frame_max = max(frame_max, duration)\n frame_min = min(frame_min, duration) if frame_min else duration\n frame_cumulative += duration\n\nperf_results = {}\nperf_results['frame_max_us'] = frame_max\nperf_results['frame_min_us'] = frame_min\nperf_results['frame_avg_us'] = frame_cumulative/25\nprint 'For 25 frames got: %s' % perf_results\n\n# Write perf_results to the output json.\nwith open(output_json_file, 'w') as f:\n f.write(json.dumps(perf_results))\n",
"[CLEANUP]/g3_try_tmp_1/lottie3.json",
"/path/to/tmp/json",
"skottie-wasm"
@@ -428,6 +443,12 @@
"@@@STEP_LOG_LINE@python.inline@output_json_file = sys.argv[2]@@@",
"@@@STEP_LOG_LINE@python.inline@renderer = sys.argv[3] # Unused for now but might be useful in the future.@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@# Output data about the GPU that was used.@@@",
"@@@STEP_LOG_LINE@python.inline@print 'GPU data:'@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-renderer')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-driver')@@@",
"@@@STEP_LOG_LINE@python.inline@print trace_json['metadata'].get('gpu-gl-vendor')@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@erroneous_termination_statuses = [@@@",
"@@@STEP_LOG_LINE@python.inline@ 'replaced_by_new_reporter_at_same_stage',@@@",
"@@@STEP_LOG_LINE@python.inline@ 'did_not_produce_frame',@@@",
@@ -76,6 +76,9 @@ def RunSteps(api):
else:
raise Exception('Could not recognize the buildername %s' % buildername)
if api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
perf_app_cmd.append('--use_gpu')
# Install prerequisites.
env_prefixes = {'PATH': [api.path['start_dir'].join('node', 'node', 'bin')]}
with api.context(cwd=perf_app_dir, env_prefixes=env_prefixes):
@@ -89,7 +92,7 @@ def RunSteps(api):
if not lottie_filename.endswith('.json'):
continue
output_file = output_dir.join(lottie_filename)
with api.context(cwd=perf_app_dir):
with api.context(cwd=perf_app_dir, env={'DISPLAY': ':0'}):
# This is occasionally flaky due to skbug.com/9207, adding retries.
attempts = 3
# Add output and input arguments to the cmd.
@@ -175,6 +178,12 @@ def parse_trace(trace_json, lottie_filename, api, renderer):
output_json_file = sys.argv[2]
renderer = sys.argv[3] # Unused for now but might be useful in the future.
# Output data about the GPU that was used.
print 'GPU data:'
print trace_json['metadata'].get('gpu-gl-renderer')
print trace_json['metadata'].get('gpu-driver')
print trace_json['metadata'].get('gpu-gl-vendor')
erroneous_termination_statuses = [
'replaced_by_new_reporter_at_same_stage',
'did_not_produce_frame',
@@ -305,6 +314,24 @@ def GenTests(api):
api.json.output(parse_trace_json))
)
skottie_gpu_buildername = ('Perf-Debian9-EMCC-NUC7i5BNK-GPU-IntelIris640-'
'wasm-Release-All-SkottieWASM')
yield (
api.test('skottie_wasm_perf_gpu') +
api.properties(buildername=skottie_gpu_buildername,
repository='https://skia.googlesource.com/skia.git',
revision='abc123',
path_config='kitchen',
trace_test_data=trace_output,
swarm_out_dir='[SWARM_OUT_DIR]') +
api.step_data('parse lottie1.json trace',
api.json.output(parse_trace_json)) +
api.step_data('parse lottie2.json trace',
api.json.output(parse_trace_json)) +
api.step_data('parse lottie3.json trace',
api.json.output(parse_trace_json))
)
lottieweb_cpu_buildername = ('Perf-Debian9-none-GCE-CPU-AVX2-x86_64-Release-'
'All-LottieWeb')
yield (
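
For readers skimming the long inline script in the hunks above, here is a minimal standalone sketch of the same frame-statistics pass, written as Python 3 purely for illustration (the recipe's inline script is Python 2 and is the authoritative version). It pairs each PipelineReporter frame id's 'BeginImplFrameToSendBeginMainFrame' start timestamp with an accepted termination event, invalidates frames that hit an erroneous termination status, and reports min/avg/max over the middle 25 completed frames. The helper name summarize_trace is hypothetical and is not part of this change.

import json

# Termination statuses copied from the inline script in the diff above.
ACCEPTED = {'missed_frame', 'submitted_frame', 'main_frame_aborted'}
ERRONEOUS = {'replaced_by_new_reporter_at_same_stage', 'did_not_produce_frame'}

def summarize_trace(trace_json, window=25):
    """Return frame_min/avg/max (microseconds) over the middle `window` frames."""
    starts = {}      # frame id -> start timestamp (us)
    durations = []   # durations of completed frames, in completion order
    for event in trace_json['traceEvents']:
        if 'PipelineReporter' not in event['name']:
            continue
        frame_id = event['id']
        args = event.get('args') or {}
        status = args.get('termination_status')
        if args.get('step') == 'BeginImplFrameToSendBeginMainFrame':
            starts[frame_id] = event['ts']
        elif status in ACCEPTED and frame_id in starts:
            durations.append(event['ts'] - starts.pop(frame_id))
        elif status in ERRONEOUS:
            # Invalidate a partially observed frame.
            starts.pop(frame_id, None)
    if len(durations) < window:
        raise ValueError('found only %d completed frames' % len(durations))
    start = (len(durations) - window) // 2
    middle = durations[start:start + window]
    return {
        'frame_min_us': min(middle),
        'frame_max_us': max(middle),
        'frame_avg_us': sum(middle) // window,
    }

if __name__ == '__main__':
    # Usage sketch: python3 summarize.py trace.json
    import sys
    with open(sys.argv[1]) as f:
        print(summarize_trace(json.load(f)))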