[canvaskit] Update Chrome version and use npm ci for tests
By using npm ci, we can make sure the versions of the helper libraries (e.g. Karma, Jasmine) we test with locally are the same as the versions used in the continuous integration system. The copying is needed because our Docker recipe forces us to run as non-root, and that was causing permission issues. As a result, I changed the CanvasKit test/perf setup to no longer re-use the same files as PathKit, so the copying is easier and the dependency between the two modules is removed.

Bug: skia:11077
Change-Id: Ib05890d666d3507d4f724a4ae298484629c7932a
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/343503
Reviewed-by: Kevin Lubick <kjlubick@google.com>
Parent: 4b208f76f0
Commit: 371967f791
@@ -216,7 +216,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/perf-karma-chrome-tests:77.0.3865.120_v1",
+      "gcr.io/skia-public/perf-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/canvaskit/perf_canvaskit.sh",
       "--builder",
       "Perf-Debian10-EMCC-GCE-CPU-AVX2-wasm-Release-All-CanvasKit",
@@ -216,7 +216,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/perf-karma-chrome-tests:77.0.3865.120_v1",
+      "gcr.io/skia-public/perf-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/canvaskit/perf_canvaskit.sh",
       "--builder",
       "Perf-Debian10-EMCC-GCE-GPU-AVX2-wasm-Release-All-CanvasKit",
@@ -19,7 +19,7 @@ DEPS = [
 ]
 
 
-DOCKER_IMAGE = 'gcr.io/skia-public/perf-karma-chrome-tests:77.0.3865.120_v1'
+DOCKER_IMAGE = 'gcr.io/skia-public/perf-karma-chrome-tests:87.0.4280.88_v1'
 INNER_KARMA_SCRIPT = 'skia/infra/canvaskit/perf_canvaskit.sh'
 
 
@@ -233,7 +233,7 @@
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
       "--env",
       "ASM_JS=1",
-      "gcr.io/skia-public/perf-karma-chrome-tests:77.0.3865.120_v1",
+      "gcr.io/skia-public/perf-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/pathkit/perf_pathkit.sh",
       "--builder",
       "Perf-Debian10-EMCC-GCE-CPU-AVX2-asmjs-Release-All-PathKit",
@@ -231,7 +231,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/perf-karma-chrome-tests:77.0.3865.120_v1",
+      "gcr.io/skia-public/perf-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/pathkit/perf_pathkit.sh",
       "--builder",
       "Perf-Debian10-EMCC-GCE-CPU-AVX2-wasm-Release-All-PathKit",
@@ -231,7 +231,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/perf-karma-chrome-tests:77.0.3865.120_v1",
+      "gcr.io/skia-public/perf-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/pathkit/perf_pathkit.sh",
       "--builder",
       "Perf-Debian10-EMCC-GCE-CPU-AVX2-wasm-Release-All-PathKit",
@@ -19,7 +19,7 @@ DEPS = [
 ]
 
 
-DOCKER_IMAGE = 'gcr.io/skia-public/perf-karma-chrome-tests:77.0.3865.120_v1'
+DOCKER_IMAGE = 'gcr.io/skia-public/perf-karma-chrome-tests:87.0.4280.88_v1'
 INNER_KARMA_SCRIPT = 'skia/infra/pathkit/perf_pathkit.sh'
 
 
@@ -231,7 +231,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2",
+      "gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/canvaskit/test_canvaskit.sh",
       "--builder",
       "Test-Debian10-EMCC-GCE-GPU-WEBGL1-wasm-Debug-All-CanvasKit",
@@ -231,7 +231,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2",
+      "gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/canvaskit/test_canvaskit.sh",
       "--builder",
       "Test-Debian10-EMCC-GCE-CPU-AVX2-wasm-Debug-All-CanvasKit",
@@ -19,7 +19,7 @@ DEPS = [
 ]
 
 
-DOCKER_IMAGE = 'gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2'
+DOCKER_IMAGE = 'gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1'
 INNER_KARMA_SCRIPT = 'skia/infra/canvaskit/test_canvaskit.sh'
 
 
@@ -171,7 +171,7 @@
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
       "--env",
       "ASM_JS=1",
-      "gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2",
+      "gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/pathkit/test_pathkit.sh",
       "--builder",
       "Test-Debian10-EMCC-GCE-CPU-AVX2-asmjs-Debug-All-PathKit",
@@ -218,7 +218,7 @@
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
       "--env",
       "ASM_JS=1",
-      "gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2",
+      "gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/pathkit/test_pathkit.sh",
       "--builder",
       "Test-Debian10-EMCC-GCE-CPU-AVX2-asmjs-Release-All-PathKit",
@@ -216,7 +216,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2",
+      "gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/pathkit/test_pathkit.sh",
       "--builder",
       "Test-Debian10-EMCC-GCE-CPU-AVX2-wasm-Debug-All-PathKit",
@@ -216,7 +216,7 @@
       "type=bind,source=[START_DIR],target=/SRC",
       "--mount",
       "type=bind,source=[START_DIR]/[SWARM_OUT_DIR],target=/OUT",
-      "gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2",
+      "gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1",
       "/SRC/skia/infra/pathkit/test_pathkit.sh",
       "--builder",
       "Test-Debian10-EMCC-GCE-CPU-AVX2-wasm-Debug-All-PathKit",
@@ -19,7 +19,7 @@ DEPS = [
 ]
 
 
-DOCKER_IMAGE = 'gcr.io/skia-public/gold-karma-chrome-tests:77.0.3865.120_v2'
+DOCKER_IMAGE = 'gcr.io/skia-public/gold-karma-chrome-tests:87.0.4280.88_v1'
 INNER_KARMA_SCRIPT = 'skia/infra/pathkit/test_pathkit.sh'
 
 
@@ -14,12 +14,18 @@ set -ex
 BASE_DIR=`cd $(dirname ${BASH_SOURCE[0]}) && pwd`
 CANVASKIT_DIR=$BASE_DIR/../../modules/canvaskit
 
+# We avoid a lot of file permission errors by copying the tests and binary into the Docker
+# container and running npm ci there.
+cp -R $CANVASKIT_DIR /tmp/work
+cd /tmp/work
+npm ci --cache /tmp/npmcache
+
 # Start the aggregator in the background
 /opt/perf-aggregator $@ &
 # Run the tests 10 times to get a wide set of data
 for i in `seq 1 10`;
 do
-  npx karma start $CANVASKIT_DIR/karma.bench.conf.js --single-run
+  npx karma start /tmp/work/karma.bench.conf.js --single-run
 done
 # Tell the aggregator to dump the json
 # This curl command gets the HTTP code and stores it into $CODE
@@ -14,10 +14,16 @@ set -ex
 BASE_DIR=`cd $(dirname ${BASH_SOURCE[0]}) && pwd`
 CANVASKIT_DIR=$BASE_DIR/../../modules/canvaskit
 
+# We avoid a lot of file permission errors by copying the tests and binary into the Docker
+# container and running npm ci there.
+cp -R $CANVASKIT_DIR /tmp/work
+cd /tmp/work
+npm ci --cache /tmp/npmcache
+
 # Start the aggregator in the background
 /opt/gold-aggregator $@ &
 # Run the tests
-npx karma start $CANVASKIT_DIR/karma.conf.js --single-run
+npx karma start /tmp/work/karma.conf.js --single-run
 # Tell the aggregator to dump the json
 # This curl command gets the HTTP code and stores it into $CODE
 CODE=`curl -s -o /dev/null -I -w "%{http_code}" -X POST localhost:8081/dump_json`
@@ -14,12 +14,18 @@ set -ex
 BASE_DIR=`cd $(dirname ${BASH_SOURCE[0]}) && pwd`
 PATHKIT_DIR=$BASE_DIR/../../modules/pathkit
 
+# We avoid a lot of file permission errors by copying the tests and binary into the Docker
+# container and running npm ci there.
+cp -R $PATHKIT_DIR /tmp/work
+cd /tmp/work
+npm ci --cache /tmp/npmcache
+
 # Start the aggregator in the background
 /opt/perf-aggregator $@ &
 # Run the tests 10 times to get a wide set of data
 for i in `seq 1 10`;
 do
-  npx karma start $PATHKIT_DIR/karma.bench.conf.js --single-run
+  npx karma start /tmp/work/karma.bench.conf.js --single-run
 done
 # Tell the aggregator to dump the json
 # This curl command gets the HTTP code and stores it into $CODE
@@ -14,10 +14,16 @@ set -ex
 BASE_DIR=`cd $(dirname ${BASH_SOURCE[0]}) && pwd`
 PATHKIT_DIR=$BASE_DIR/../../modules/pathkit
 
+# We avoid a lot of file permission errors by copying the tests and binary into the Docker
+# container and running npm ci there.
+cp -R $PATHKIT_DIR /tmp/work
+cd /tmp/work
+npm ci --cache /tmp/npmcache
+
 # Start the aggregator in the background
 /opt/gold-aggregator $@ &
 # Run the tests
-npx karma start $PATHKIT_DIR/karma.conf.js --single-run
+npx karma start /tmp/work/karma.conf.js --single-run
 # Tell the aggregator to dump the json
 # This curl command gets the HTTP code and stores it into $CODE
 CODE=`curl -s -o /dev/null -I -w "%{http_code}" -X POST localhost:8081/dump_json`
@@ -16,7 +16,7 @@ aggregator:
 
 # Can check CHROME_VERSION with
 # docker run karma-chrome-tests /usr/bin/google-chrome-stable --version
-CHROME_VERSION=77.0.3865.120_v2
+CHROME_VERSION=87.0.4280.88_v1
 
 publish_gold_karma_chrome_tests: gold_docker_image
 	docker tag gold-karma-chrome-tests gcr.io/skia-public/gold-karma-chrome-tests:${CHROME_VERSION}
@@ -2,7 +2,7 @@ EMSDK_VERSION=2.0.6_v1
 
 # Can check CHROME_VERSION with
 # docker run karma-chrome-tests /usr/bin/google-chrome-stable --version
-CHROME_VERSION=77.0.3865.120_v1
+CHROME_VERSION=87.0.4280.88_v1
 
 publish_emsdk_base:
 	docker build --no-cache -t emsdk-base ./emsdk-base/
@@ -1,7 +1,7 @@
 Docker
 ======
 
-Docker files to ease working with PathKit and WASM.
+Docker files to ease working with PathKit and CanvasKit.
 
 emsdk-base
 ----------
@@ -4,6 +4,6 @@
 # Tests will be run as non-root (user skia, in fact), so /OUT should have permissions
 # 777 so as to be able to create output there.
 
-FROM gcr.io/skia-public/karma-chrome-tests:77.0.3865.120_v1
+FROM gcr.io/skia-public/karma-chrome-tests:87.0.4280.88_v1
 
 COPY /tmp/gold-aggregator /opt/gold-aggregator
@@ -4,7 +4,7 @@
 # Tests will be run as non-root (user skia, in fact), so /OUT should have permissions
 # 777 so as to be able to create output there.
 
-FROM node:8.11
+FROM node:14.15
 
 RUN apt-get update && apt-get upgrade -y
 
@@ -17,14 +17,6 @@ RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key
 RUN sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list'
 RUN apt-get update && apt-get install -y google-chrome-stable
 
-RUN npm install --global jasmine-core@3.1.0 karma@2.0.5 \
-    karma-chrome-launcher@2.2.0 karma-jasmine@1.1.2 requirejs@2.3.5 \
-    is-docker@1.1.0
-
-# Allows require('is-docker') or require('karma') to work from anywhere.
-# https://stackoverflow.com/a/15646750
-ENV NODE_PATH=/usr/local/lib/node_modules
-
 #Add user so we don't have to run as root (prevents us from over-writing files in /SRC)
 RUN groupadd -g 2000 skia \
   && useradd -u 2000 -g 2000 skia \
@@ -4,6 +4,6 @@
 # Tests will be run as non-root (user skia, in fact), so /OUT should have permissions
 # 777 so as to be able to create output there.
 
-FROM gcr.io/skia-public/karma-chrome-tests:77.0.3865.120_v1
+FROM gcr.io/skia-public/karma-chrome-tests:87.0.4280.88_v1
 
 COPY /tmp/perf-aggregator /opt/perf-aggregator
@@ -44,7 +44,7 @@ var (
 	config     = flag.String("config", "Release", "Configuration (e.g. Debug/Release) key")
 	gitHash    = flag.String("git_hash", "-", "The git commit hash of the version being tested")
 	hostOS     = flag.String("host_os", "Debian9", "OS Key")
-	issue      = flag.String("issue", "", "ChangeListID (if tryjob)")
+	issue      = flag.String("issue", "", "ChangelistID (if tryjob)")
 	patchset   = flag.Int("patchset", 0, "patchset (if tryjob)")
 	taskId     = flag.String("task_id", "", "swarming task id")
 	sourceType = flag.String("source_type", "pathkit", "Gold Source type, like pathkit,canvaskit")
@@ -177,12 +177,12 @@ func dumpJSON(w http.ResponseWriter, r *http.Request) {
 
 	dmresults := jsonio.GoldResults{
 		Builder:                     *builder,
-		ChangeListID:                *issue,
+		ChangelistID:                *issue,
 		CodeReviewSystem:            "gerrit",
 		ContinuousIntegrationSystem: "buildbucket",
 		GitHash:                     *gitHash,
 		Key:                         defaultKeys,
-		PatchSetOrder:               *patchset,
+		PatchsetOrder:               *patchset,
 		Results:                     results,
 		TaskID:                      *taskId,
 		TryJobID:                    *buildBucketID,
@@ -12,7 +12,7 @@ module.exports = function(config) {
     files: [
       { pattern: 'canvaskit/bin/canvaskit.wasm', included:false, served:true},
      { pattern: 'perf/assets/*', included:false, served:true},
-      '../../modules/pathkit/perf/perfReporter.js',
+      'perf/perfReporter.js',
       'canvaskit/bin/canvaskit.js',
       'tests/canvaskitinit.js',
       'tests/util.js',
@@ -12,7 +12,7 @@ module.exports = function(config) {
     files: [
       { pattern: 'canvaskit/bin/canvaskit.wasm', included:false, served:true},
      { pattern: 'tests/assets/*', included:false, served:true},
-      '../../modules/pathkit/tests/testReporter.js',
+      'tests/testReporter.js',
       'canvaskit/bin/canvaskit.js',
       'tests/canvaskitinit.js',
       'tests/util.js',
New file: modules/canvaskit/perf/perfReporter.js (126 lines)
const REPORT_URL = 'http://localhost:8081/report_perf_data'
// Set this to enforce that the perf server must be up.
// Typically used for debugging.
const fail_on_no_perf = false;

function benchmarkAndReport(benchName, setupFn, testFn, teardownFn) {
  try {
    let ctx = {};
    // warmup 3 times (arbitrary choice)
    setupFn(ctx);
    testFn(ctx);
    testFn(ctx);
    testFn(ctx);
    teardownFn(ctx);

    ctx = {};
    setupFn(ctx);
    let start = Date.now();
    let now = start;
    times = 0;
    // See how many times we can do it in 100ms (arbitrary choice)
    while (now - start < 100) {
      testFn(ctx);
      now = Date.now();
      times++;
    }

    teardownFn(ctx);

    // Try to make it go for 2 seconds (arbitrarily chosen)
    // Since the pre-try took 100ms, multiply by 20 to get
    // approximate tries in 2s (unless now - start >> 100 ms)
    let goalTimes = times * 20;
    ctx = {};
    setupFn(ctx);
    times = 0;
    start = Date.now();
    while (times < goalTimes) {
      testFn(ctx);
      times++;
    }
    const end = Date.now();
    teardownFn(ctx);

    const us = (end - start) * 1000 / times;
    console.log(benchName, `${us} microseconds`)
    return _report(us, benchName);
  } catch(e) {
    console.error('caught error', e);
    return Promise.reject(e);
  }
}

// The same as benchmarkAndReport, except expects the third parameter, testFn, to return a promise
async function asyncBenchmarkAndReport(benchName, setupFn, testFn, teardownFn) {
  try {
    let ctx = {};
    // warmup 3 times (arbitrary choice)
    setupFn(ctx);
    await testFn(ctx);
    await testFn(ctx);
    await testFn(ctx);
    teardownFn(ctx);

    ctx = {};
    setupFn(ctx);
    let start = Date.now();
    let now = start;
    times = 0;
    // See how many times we can do it in 100ms (arbitrary choice)
    while (now - start < 100) {
      await testFn(ctx);
      now = Date.now();
      times++;
    }

    teardownFn(ctx);

    // Try to make it go for 2 seconds (arbitrarily chosen)
    // Since the pre-try took 100ms, multiply by 20 to get
    // approximate tries in 2s (unless now - start >> 100 ms)
    let goalTimes = times * 20;
    ctx = {};
    setupFn(ctx);
    times = 0;
    start = Date.now();
    while (times < goalTimes) {
      await testFn(ctx);
      times++;
    }
    const end = Date.now();
    teardownFn(ctx);

    const us = (end - start) * 1000 / times;
    console.log(benchName, `${us} microseconds`)
    return _report(us, benchName);
  } catch(e) {
    console.error('caught error', e);
    return Promise.reject(e);
  }
}


function _report(microseconds, benchName) {
  return fetch(REPORT_URL, {
    method: 'POST',
    mode: 'no-cors',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      'bench_name': benchName,
      'time_us': microseconds,
    })
  }).then(() => console.log(`Successfully reported ${benchName} to perf aggregator`));
}

function reportError(done) {
  return (e) => {
    console.log("Error with fetching. Likely could not connect to aggregator server", e.message);
    if (fail_on_no_perf) {
      expect(e).toBeUndefined();
    }
    done();
  };
}
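For context, a minimal sketch of how a Jasmine spec might drive benchmarkAndReport from the file above. This is not part of the commit: the bench name and the drawing code are made up, and it assumes Jasmine and perfReporter.js are already loaded via the Karma config, as the karma.bench.conf.js change above sets up.

// Hypothetical usage, not part of this commit.
it('can time a simple 2d canvas fill', (done) => {
  let canvas;
  function setup(ctx) {
    canvas = document.createElement('canvas');
    ctx.ctx2d = canvas.getContext('2d');
  }
  function test(ctx) {
    ctx.ctx2d.fillRect(0, 0, 100, 100); // the operation being timed
  }
  function teardown(ctx) {
    canvas = null;
  }
  // benchmarkAndReport resolves once the measured time has been POSTed to the
  // local perf aggregator; reportError tolerates the aggregator being down.
  benchmarkAndReport('hypothetical_fill_rect', setup, test, teardown)
      .then(() => done())
      .catch(reportError(done));
});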
New file: modules/canvaskit/tests/testReporter.js (129 lines)
const REPORT_URL = 'http://localhost:8081/report_gold_data'
// Set this to enforce that the gold server must be up.
// Typically used for debugging.
const fail_on_no_gold = false;

function reportCanvas(canvas, testname, outputType='canvas') {
  let b64 = canvas.toDataURL('image/png');
  return _report(b64, outputType, testname);
}

function reportSVG(svg, testname) {
  // This converts an SVG to a base64 encoded PNG. It basically creates an
  // <img> element that takes the inlined SVG and draws it on a canvas.
  // The trick is we have to wait until the image is loaded, thus the Promise
  // wrapping below.
  let svgStr = svg.outerHTML;
  let tempImg = document.createElement('img');

  let tempCanvas = document.createElement('canvas');
  let canvasCtx = tempCanvas.getContext('2d');
  setCanvasSize(canvasCtx, svg.getAttribute('width'), svg.getAttribute('height'));

  return new Promise(function(resolve, reject) {
    tempImg.onload = () => {
      canvasCtx.drawImage(tempImg, 0, 0);
      let b64 = tempCanvas.toDataURL('image/png');
      _report(b64, 'svg', testname).then(() => {
        resolve();
      }).catch((e) => reject(e));
    };
    tempImg.setAttribute('src', 'data:image/svg+xml;,' + svgStr);
  });
}

// For tests that just do a simple path and return it as a string, wrap it in
// a proper svg and send it off. Supports fill (nofill means just stroke it).
// This uses the "standard" size of 600x600.
function reportSVGString(svgstr, testname, fillRule='nofill') {
  let newPath = document.createElementNS('http://www.w3.org/2000/svg', 'path');
  newPath.setAttribute('stroke', 'black');
  if (fillRule !== 'nofill') {
    newPath.setAttribute('fill', 'orange');
    newPath.setAttribute('fill-rule', fillRule);
  } else {
    newPath.setAttribute('fill', 'rgba(255,255,255,0.0)');
  }
  newPath.setAttribute('d', svgstr);
  let newSVG = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
  newSVG.appendChild(newPath);
  // helps with the conversion to PNG.
  newSVG.setAttribute('xmlns', 'http://www.w3.org/2000/svg');
  newSVG.setAttribute('width', 600);
  newSVG.setAttribute('height', 600);
  return reportSVG(newSVG, testname);
}

// Reports a canvas and then an SVG of this path. Puts it on a standard size canvas.
function reportPath(path, testname, done) {
  let canvas = document.createElement('canvas');
  let canvasCtx = canvas.getContext('2d');
  // Set canvas size and make it a bit bigger to zoom in on the lines
  standardizedCanvasSize(canvasCtx);
  canvasCtx.stroke(path.toPath2D());

  let svgStr = path.toSVGString();

  return reportCanvas(canvas, testname).then(() => {
    reportSVGString(svgStr, testname).then(() => {
      done();
    }).catch(reportError(done));
  }).catch(reportError(done));
}

// data is a base64 encoded png, outputType is the value that goes with the
// key 'config' when reporting.
function _report(data, outputType, testname) {
  return fetch(REPORT_URL, {
    method: 'POST',
    mode: 'no-cors',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      'output_type': outputType,
      'data': data,
      'test_name': testname,
    })
  }).then(() => console.log(`Successfully reported ${testname} to gold aggregator`));
}

function reportError(done) {
  return (e) => {
    console.log("Error with fetching. Likely could not connect to aggregator server", e.message);
    if (fail_on_no_gold) {
      expect(e).toBeUndefined();
    }
    done();
  };
}

function setCanvasSize(ctx, width, height) {
  ctx.canvas.width = width;
  ctx.canvas.height = height;
}

function standardizedCanvasSize(ctx) {
  setCanvasSize(ctx, 600, 600);
}

// A wrapper to catch and print a stacktrace to the logs.
// Exceptions normally show up in the browser console,
// but not in the logs that appear on the bots AND a thrown
// exception will normally cause a test to time out.
// This wrapper mitigates both those pain points.
function catchException(done, fn) {
  return () => {
    try {
      fn()
    } catch (e) {
      console.log('Failed with the following error', e);
      expect(e).toBeFalsy();
      debugger;
      done();
    }
    // We don't call done with finally because
    // that would break the asynchronous nature
    // of fn().
  }
}
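Likewise, a minimal sketch of how a test might use catchException and reportCanvas from the file above. This is not part of the commit: the test name and drawing are made up, and it assumes Jasmine and testReporter.js are loaded via karma.conf.js as changed above.

// Hypothetical usage, not part of this commit.
describe('example gold report', () => {
  it('posts a PNG of the canvas to the gold aggregator', (done) => {
    catchException(done, () => {
      const canvas = document.createElement('canvas');
      const ctx = canvas.getContext('2d');
      standardizedCanvasSize(ctx); // the "standard" 600x600 used above
      ctx.fillStyle = 'orange';
      ctx.fillRect(100, 100, 400, 400);
      // reportCanvas base64-encodes the canvas as a PNG and POSTs it to REPORT_URL;
      // reportError tolerates the aggregator being down unless fail_on_no_gold is set.
      reportCanvas(canvas, 'hypothetical_orange_square')
          .then(() => done())
          .catch(reportError(done));
    })();
  });
});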