skia2/modules/canvaskit/perf/perfReporter.js
Kevin Lubick 371967f791 [canvaskit] Update Chrome version and use npm ci for tests
By using npm ci, we can make sure the versions of the helper
libraries (e.g. Karma, Jasmine) we are testing with locally
are the same as the versions we are using in the continuous
integration system.

The copying is needed because our docker recipe forces us
to run as a non-root user, and this was causing some issues. As a
result, I changed the canvaskit test/perf setup to not re-use the
same file that pathkit does, so copying is easier and the
dependency between the two modules is broken.

Bug: skia:11077
Change-Id: Ib05890d666d3507d4f724a4ae298484629c7932a
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/343503
Reviewed-by: Kevin Lubick <kjlubick@google.com>
2020-12-14 15:03:42 +00:00

const REPORT_URL = 'http://localhost:8081/report_perf_data';

// Set this to enforce that the perf server must be up.
// Typically used for debugging.
const fail_on_no_perf = false;

function benchmarkAndReport(benchName, setupFn, testFn, teardownFn) {
    try {
        let ctx = {};
        // Warm up 3 times (arbitrary choice).
        setupFn(ctx);
        testFn(ctx);
        testFn(ctx);
        testFn(ctx);
        teardownFn(ctx);

        ctx = {};
        setupFn(ctx);
        let start = Date.now();
        let now = start;
        let times = 0;
        // See how many times we can do it in 100ms (arbitrary choice).
        while (now - start < 100) {
            testFn(ctx);
            now = Date.now();
            times++;
        }
        teardownFn(ctx);

        // Try to make it go for 2 seconds (arbitrarily chosen).
        // Since the pre-try took ~100ms, multiply by 20 to get the
        // approximate number of tries in 2s (unless now - start >> 100ms).
        let goalTimes = times * 20;
        ctx = {};
        setupFn(ctx);
        times = 0;
        start = Date.now();
        while (times < goalTimes) {
            testFn(ctx);
            times++;
        }
        const end = Date.now();
        teardownFn(ctx);

        const us = (end - start) * 1000 / times;
        console.log(benchName, `${us} microseconds`);
        return _report(us, benchName);
    } catch (e) {
        console.error('caught error', e);
        return Promise.reject(e);
    }
}
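
// A minimal usage sketch for benchmarkAndReport inside a Jasmine spec. The
// spec name, makePath, and drawPath are illustrative assumptions, not part
// of this file; setupFn, testFn, and teardownFn all receive the same shared
// ctx object.
//
//   it('benches drawing a simple path', (done) => {
//     benchmarkAndReport('canvas_drawPath', (ctx) => {
//       ctx.path = makePath();     // setupFn: build state before timing
//     }, (ctx) => {
//       drawPath(ctx.path);        // testFn: the operation being timed
//     }, (ctx) => {
//       ctx.path.delete();         // teardownFn: release state
//     }).then(() => done()).catch(reportError(done));
//   });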

// The same as benchmarkAndReport, except it expects the third parameter,
// testFn, to return a Promise that resolves when one iteration is done.
async function asyncBenchmarkAndReport(benchName, setupFn, testFn, teardownFn) {
    try {
        let ctx = {};
        // Warm up 3 times (arbitrary choice).
        setupFn(ctx);
        await testFn(ctx);
        await testFn(ctx);
        await testFn(ctx);
        teardownFn(ctx);

        ctx = {};
        setupFn(ctx);
        let start = Date.now();
        let now = start;
        let times = 0;
        // See how many times we can do it in 100ms (arbitrary choice).
        while (now - start < 100) {
            await testFn(ctx);
            now = Date.now();
            times++;
        }
        teardownFn(ctx);

        // Try to make it go for 2 seconds (arbitrarily chosen).
        // Since the pre-try took ~100ms, multiply by 20 to get the
        // approximate number of tries in 2s (unless now - start >> 100ms).
        let goalTimes = times * 20;
        ctx = {};
        setupFn(ctx);
        times = 0;
        start = Date.now();
        while (times < goalTimes) {
            await testFn(ctx);
            times++;
        }
        const end = Date.now();
        teardownFn(ctx);

        const us = (end - start) * 1000 / times;
        console.log(benchName, `${us} microseconds`);
        return _report(us, benchName);
    } catch (e) {
        console.error('caught error', e);
        return Promise.reject(e);
    }
}
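
// A usage sketch for the async variant, where each timed iteration awaits a
// Promise. decodeImage and somePngBytes are illustrative assumptions, not
// APIs defined in this file.
//
//   it('benches decoding an image', (done) => {
//     asyncBenchmarkAndReport('decode_png', (ctx) => {
//       ctx.bytes = somePngBytes;
//     }, async (ctx) => {
//       await decodeImage(ctx.bytes);
//     }, (ctx) => {
//       ctx.bytes = null;
//     }).then(() => done()).catch(reportError(done));
//   });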

function _report(microseconds, benchName) {
    return fetch(REPORT_URL, {
        method: 'POST',
        mode: 'no-cors',
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({
            'bench_name': benchName,
            'time_us': microseconds,
        }),
    }).then(() => console.log(`Successfully reported ${benchName} to perf aggregator`));
}
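
// For reference, the POST body sent above looks like (values illustrative):
//   {"bench_name": "canvas_drawPath", "time_us": 52.3}
// Note that with mode: 'no-cors' the response is opaque, so the .then()
// above fires on any completed request, not only on an HTTP 200.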

function reportError(done) {
    return (e) => {
        console.log('Error with fetching. Likely could not connect to aggregator server', e.message);
        if (fail_on_no_perf) {
            expect(e).toBeUndefined();
        }
        done();
    };
}
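
// A sketch of how reportError pairs with the helpers above: used as the
// catch handler, it lets a spec finish cleanly when the aggregator is down,
// unless fail_on_no_perf is set.
//
//   benchmarkAndReport(...).then(() => done()).catch(reportError(done));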