nanobench: Add per-run time reporting.
This patch adds per-benchmark-iteration times to the JSON output. Since we already collect these statistics, we might as well expose them to the user. No unit test is included, since `rgrep -i json tests` yielded nothing; happy to add one if someone wants it.

BUG=None
TEST=nanobench now writes per-run timings to the output JSON.

Change-Id: I910f1d97fd3e0ee69fc8e78e011e67b9c866f18d
Reviewed-on: https://skia-review.googlesource.com/5617
Reviewed-by: Mike Klein <mtklein@chromium.org>
Commit-Queue: Ravi Mistry <rmistry@google.com>
Parent: c57c7c948a
Commit: 75b5718d20
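The new metrics() hook records a whole list of sample times under a single key, so each per-config JSON object gains an array entry ("samples") next to existing scalar metrics such as "min_ms". Below is a minimal standalone sketch of that shape, assuming plain jsoncpp; this is not Skia code, and the sample values are invented.

    #include <json/json.h>
    #include <iostream>
    #include <vector>

    int main() {
        std::vector<double> samples = {0.51, 0.49, 0.50};  // hypothetical per-run times in ms

        // Mirror what the new JSONResultsWriter::metrics() does: copy the
        // samples into a JSON array that is sized up front.
        Json::Value value(Json::arrayValue);
        value.resize(static_cast<Json::ArrayIndex>(samples.size()));
        for (Json::ArrayIndex i = 0; i < value.size(); i++) {
            value[i] = samples[i];
        }

        Json::Value config;
        config["min_ms"]  = 0.49;     // existing scalar metric
        config["samples"] = value;    // new: every per-run time, in order
        std::cout << config << "\n";  // prints {"min_ms": 0.49, "samples": [0.51, 0.49, 0.50]} (styled)
        return 0;
    }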
@@ -47,6 +47,9 @@ public:
     // Record a single test metric.
     virtual void metric(const char name[], double ms) {}
 
+    // Record a list of test metrics.
+    virtual void metrics(const char name[], const SkTArray<double>& array) {}
+
     // Flush to storage now please.
     virtual void flush() {}
 };
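Because the base ResultsWriter supplies a no-op default for the new virtual, existing writers keep compiling unchanged and only writers that care about per-run samples need to override it. A hypothetical sketch of such a subclass follows; std::vector stands in for SkTArray so the example builds without Skia.

    #include <cstdio>
    #include <vector>

    // Stand-in for Skia's ResultsWriter interface, with no-op defaults.
    class ResultsWriterSketch {
    public:
        virtual ~ResultsWriterSketch() = default;
        virtual void metric(const char name[], double ms) {}
        virtual void metrics(const char name[], const std::vector<double>& array) {}
        virtual void flush() {}
    };

    // Hypothetical writer that only overrides the new per-run samples hook.
    class StderrWriterSketch : public ResultsWriterSketch {
    public:
        void metrics(const char name[], const std::vector<double>& array) override {
            std::fprintf(stderr, "%s:", name);
            for (double ms : array) {
                std::fprintf(stderr, " %.3f", ms);
            }
            std::fprintf(stderr, "\n");
        }
    };

    int main() {
        StderrWriterSketch writer;
        writer.metrics("samples", {0.51, 0.49, 0.50});  // made-up per-run times in ms
        writer.flush();                                 // inherited no-op
        return 0;
    }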
@@ -114,6 +117,16 @@ public:
         SkASSERT(fConfig);
         (*fConfig)[name] = ms;
     }
+    void metrics(const char name[], const SkTArray<double>& array) override {
+        SkASSERT(fConfig);
+        Json::Value value = Json::Value(Json::arrayValue);
+        value.resize(array.count());
+        for (int i = 0; i < array.count(); i++) {
+            // Don't care about nan-ness.
+            value[i] = array[i];
+        }
+        (*fConfig)[name] = std::move(value);
+    }
 
     // Flush to storage now please.
     void flush() override {
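Implementation note: the writer sizes the JSON array once with resize() and fills it by index rather than appending element by element, then hands the finished Json::Value to the config entry via std::move() (falling back to a copy where jsoncpp lacks move assignment). As the inline comment says, NaN samples are copied through unchecked.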
@@ -1288,6 +1288,7 @@ int nanobench_main() {
             benchStream.fillCurrentOptions(log.get());
             target->fillOptions(log.get());
             log->metric("min_ms", stats.min);
+            log->metrics("samples", samples);
 #if SK_SUPPORT_GPU
             if (gpuStatsDump) {
                 // dump to json, only SKPBench currently returns valid keys / values