[canvaskit] Add unit tests to wasm_gm_tests
There are currently many tests skipped, but many more pass. This changes the built binary to have a lot of debugging logic in it, so we should be able to get backtraces on those crashes more easily when debugging.

gmtests.html was removed, as it was superseded by run-wasm-gm-tests and make run_local.

Bug: skia:10812, skia:10869
Change-Id: I72ab34d3db83a654dc8829831b3ecb795fe23d43
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/329170
Reviewed-by: Chris Dalton <csmartdalton@google.com>
Reviewed-by: Nathaniel Nifong <nifong@google.com>
Commit-Queue: Kevin Lubick <kjlubick@google.com>
Commit dffd20efe9 (parent 4dd3c8cbef)
@@ -1734,7 +1734,7 @@ func (b *jobBuilder) compileWasmGMTests(compileName string) {
 b.cipd(CIPD_PKG_LUCI_AUTH)
 b.dep(b.buildTaskDrivers())
 b.output("wasm_out")
-b.timeout(20 * time.Minute)
+b.timeout(60 * time.Minute)
 b.isolate("compile.isolate")
 b.serviceAccount(b.cfg.ServiceAccountCompile)
 b.cache(CACHES_DOCKER...)
@@ -193,7 +193,6 @@ func runTests(ctx context.Context, builtPath, nodeBinPath, resourcePath, testHar
 ctx = td.StartStep(ctx, td.Props("run GMs and unit tests"))
 defer td.EndStep(ctx)

-// TODO(kjlubick) the test harness does not actually run unit tests yet.
 err := td.Do(ctx, td.Props("Run GMs and Unit Tests"), func(ctx context.Context) error {
 args := []string{filepath.Join(nodeBinPath, "node"),
 "run-wasm-gm-tests",
@@ -203,7 +202,7 @@ func runTests(ctx context.Context, builtPath, nodeBinPath, resourcePath, testHar
 "--use_gpu", // TODO(kjlubick) use webglVersion and account for CPU
 "--output", workPath,
 "--resources", resourcePath,
-"--timeout", "120", // 120 seconds per batch of 50 tests.
+"--timeout", "180", // 180 seconds per batch of 50 tests.
 }

 _, err := exec.RunCwd(ctx, testHarnessPath, args...)
@@ -8533,8 +8533,8 @@
 "os:Debian-10.3",
 "pool:Skia"
 ],
-"execution_timeout_ns": 1200000000000,
-"io_timeout_ns": 1200000000000,
+"execution_timeout_ns": 3600000000000,
+"io_timeout_ns": 3600000000000,
 "isolate": "compile.isolate",
 "max_attempts": 1,
 "outputs": [
@@ -117,7 +117,7 @@ echo "Compiling bitcode"
 ${GN_GPU} \
 ${GN_FONT} \
 skia_use_expat=true \
-skia_enable_ccpr=false \
+skia_enable_ccpr=true \
 skia_enable_svg=true \
 skia_enable_skshaper=true \
 skia_enable_nvpr=false \
@@ -144,10 +144,12 @@ if [[ `uname` != "Linux" ]]; then
 fi

 GMS_TO_BUILD="gm/*.cpp"
+TESTS_TO_BUILD="tests/*.cpp"
 # When developing locally, it can be faster to focus only on the gms or tests you care about
 # (since they all have to be recompiled/relinked) every time. To do so, mark the following as true
 if false; then
-GMS_TO_BUILD="gm/beziereffects.cpp gm/gm.cpp"
+GMS_TO_BUILD="gm/bleed.cpp gm/gm.cpp"
+TESTS_TO_BUILD="tests/OctoBoundsTest.cpp tests/Test.cpp"
 fi

 # These gms do not compile or link with the WASM code. Thus, we omit them.
@@ -155,7 +157,28 @@ GLOBIGNORE="gm/cgms.cpp:"\
 "gm/compressed_textures.cpp:"\
 "gm/fiddle.cpp:"\
 "gm/xform.cpp:"\
-"gm/video_decoder.cpp"
+"gm/video_decoder.cpp:"

+# These tests do not compile with the WASM code (require other deps).
+GLOBIGNORE+="tests/CodecTest.cpp:"\
+"tests/ColorSpaceTest.cpp:"\
+"tests/DrawOpAtlasTest.cpp:"\
+"tests/EncodeTest.cpp:"\
+"tests/FontMgrAndroidParserTest.cpp:"\
+"tests/FontMgrFontConfigTest.cpp:"\
+"tests/SkVMTest.cpp:"
+
+# These tests do complex things with TestContexts, which is not easily supported for the WASM
+# test harness. Thus we omit them.
+GLOBIGNORE+="tests/BackendAllocationTest.cpp:"\
+"tests/EGLImageTest.cpp:"\
+"tests/ImageTest.cpp:"\
+"tests/SurfaceSemaphoreTest.cpp:"\
+"tests/TextureBindingsResetTest.cpp:"\
+"tests/VkHardwareBufferTest.cpp:"
+
+# All the tests in these files crash.
+GLOBIGNORE+="tests/GrThreadSafeCacheTest.cpp"
+
 # Emscripten prefers that the .a files go last in order, otherwise, it
 # may drop symbols that it incorrectly thinks aren't used. One day,
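The exclusions above work through bash's GLOBIGNORE variable: any pathname matching one of the colon-separated patterns is silently dropped from later glob expansions such as gm/*.cpp and tests/*.cpp. A minimal sketch of that behavior, using placeholder file names rather than anything from this CL:

    # Illustration only; the demo/ files below are made up, not Skia sources.
    mkdir -p demo && touch demo/a.cpp demo/b.cpp demo/skip_me.cpp
    GLOBIGNORE="demo/skip_me.cpp"
    echo demo/*.cpp    # prints: demo/a.cpp demo/b.cpp
    unset GLOBIGNORE
    echo demo/*.cpp    # prints all three files again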
@@ -168,12 +191,15 @@ EMCC_DEBUG=1 ${EMCXX} \
+-DGR_OP_ALLOCATE_USE_NEW \
 $WASM_GPU \
 -std=c++17 \
 --profiling-funcs \
+--profiling \
 --bind \
 --no-entry \
 --pre-js $BASE_DIR/gm.js \
 tools/Resources.cpp \
 $BASE_DIR/gm_bindings.cpp \
 $GMS_TO_BUILD \
+$TESTS_TO_BUILD \
 $GM_LIB \
 $BUILD_DIR/libskshaper.a \
 $BUILD_DIR/libsvg.a \
@@ -18,12 +18,14 @@
 #include "include/core/SkImageInfo.h"
 #include "include/core/SkStream.h"
 #include "include/core/SkSurface.h"
+#include "include/gpu/GrContextOptions.h"
 #include "include/gpu/GrDirectContext.h"
 #include "include/gpu/gl/GrGLInterface.h"
 #include "include/gpu/gl/GrGLTypes.h"
 #include "modules/canvaskit/WasmCommon.h"
 #include "src/core/SkFontMgrPriv.h"
 #include "src/core/SkMD5.h"
+#include "tests/Test.h"
 #include "tools/HashAndEncode.h"
 #include "tools/ResourceFactory.h"
 #include "tools/flags/CommandLineFlags.h"
@@ -198,6 +200,138 @@ static JSObject RunGM(sk_sp<GrDirectContext> ctx, std::string name) {
     return result;
 }

+static JSArray ListTests() {
+    SkDebugf("Listing Tests\n");
+    JSArray tests = emscripten::val::array();
+    for (auto test : skiatest::TestRegistry::Range()) {
+        SkDebugf("test %s\n", test.name);
+        tests.call<void>("push", std::string(test.name));
+    }
+    return tests;
+}
+
+static skiatest::Test getTestWithName(std::string name, bool* ok) {
+    for (auto test : skiatest::TestRegistry::Range()) {
+        if (name == test.name) {
+            *ok = true;
+            return test;
+        }
+    }
+    *ok = false;
+    return skiatest::Test(nullptr, false, nullptr);
+}
+
+// Based on DM.cpp:run_test
+struct WasmReporter : public skiatest::Reporter {
+    WasmReporter(std::string name, JSObject result): fName(name), fResult(result){}
+
+    void reportFailed(const skiatest::Failure& failure) override {
+        SkDebugf("Test %s failed: %s\n", fName.c_str(), failure.toString().c_str());
+        fResult.set("result", "failed");
+        fResult.set("msg", failure.toString().c_str());
+    }
+    std::string fName;
+    JSObject fResult;
+};
+
+/**
+ * Runs the given Test and returns a JS object. If the Test was located, the object will have the
+ * following properties:
+ *   "result" : One of "passed", "failed", "skipped".
+ *   "msg": May be non-empty on failure
+ */
+static JSObject RunTest(std::string name) {
+    JSObject result = emscripten::val::object();
+    bool ok = false;
+    auto test = getTestWithName(name, &ok);
+    if (!ok) {
+        SkDebugf("Could not find test with name %s\n", name.c_str());
+        return result;
+    }
+    GrContextOptions grOpts;
+    if (test.needsGpu) {
+        result.set("result", "passed"); // default to passing - the reporter will mark failed.
+        WasmReporter reporter(name, result);
+        test.run(&reporter, grOpts);
+        return result;
+    }
+
+    result.set("result", "passed"); // default to passing - the reporter will mark failed.
+    WasmReporter reporter(name, result);
+    test.run(&reporter, grOpts);
+    return result;
+}
+
+namespace skiatest {
+
+class WasmContextInfo : public sk_gpu_test::ContextInfo {
+public:
+    WasmContextInfo(GrDirectContext* context,
+                    const GrContextOptions& options)
+            : fContext(context), fOptions(options) {}
+
+    GrDirectContext* directContext() const { return fContext; }
+    sk_gpu_test::TestContext* testContext() const { return nullptr; }
+
+    sk_gpu_test::GLTestContext* glContext() const { return nullptr; }
+
+    const GrContextOptions& options() const { return fOptions; }
+private:
+    GrDirectContext* fContext = nullptr;
+    GrContextOptions fOptions;
+};
+
+using ContextType = sk_gpu_test::GrContextFactory::ContextType;
+
+// These are the supported GrContextTypeFilterFn
+bool IsGLContextType(ContextType ct) {
+    return GrBackendApi::kOpenGL == sk_gpu_test::GrContextFactory::ContextTypeBackend(ct);
+}
+bool IsRenderingGLContextType(ContextType ct) {
+    return IsGLContextType(ct) && sk_gpu_test::GrContextFactory::IsRenderingContext(ct);
+}
+bool IsRenderingGLOrMetalContextType(ContextType ct) {
+    return IsRenderingGLContextType(ct);
+}
+bool IsMockContextType(ContextType ct) {
+    return ct == ContextType::kMock_ContextType;
+}
+// These are not supported
+bool IsVulkanContextType(ContextType) {return false;}
+bool IsMetalContextType(ContextType) {return false;}
+bool IsDirect3DContextType(ContextType) {return false;}
+bool IsDawnContextType(ContextType) {return false;}
+
+void RunWithGPUTestContexts(GrContextTestFn* test, GrContextTypeFilterFn* contextTypeFilter,
+                            Reporter* reporter, const GrContextOptions& options) {
+    for (auto contextType : {ContextType::kGLES_ContextType, ContextType::kMock_ContextType}) {
+        if (contextTypeFilter && !(*contextTypeFilter)(contextType)) {
+            continue;
+        }
+        sk_sp<GrDirectContext> ctx = (contextType == ContextType::kGLES_ContextType) ?
+                GrDirectContext::MakeGL(options) :
+                GrDirectContext::MakeMock(nullptr, options);
+        if (!ctx) {
+            SkDebugf("Could not make context\n");
+            return;
+        }
+        WasmContextInfo ctxInfo(ctx.get(), options);
+
+        // From DMGpuTestProcs.cpp
+        (*test)(reporter, ctxInfo);
+        // Sync so any release/finished procs get called.
+        ctxInfo.directContext()->flushAndSubmit(/*sync*/true);
+    }
+}
+} // namespace skiatest
+
+namespace sk_gpu_test {
+GLTestContext *CreatePlatformGLTestContext(GrGLStandard forcedGpuAPI,
+                                           GLTestContext *shareContext) {
+    return nullptr;
+}
+} // namespace sk_gpu_test
+
 void Init() {
     // Use the portable fonts.
     gSkFontMgr_DefaultFactory = &ToolUtils::MakePortableFontMgr;
@@ -206,10 +340,12 @@ void Init() {
 EMSCRIPTEN_BINDINGS(GMs) {
     function("Init", &Init);
     function("ListGMs", &ListGMs);
+    function("ListTests", &ListTests);
     function("LoadKnownDigest", &LoadKnownDigest);
     function("_LoadResource", &LoadResource);
     function("MakeGrContext", &MakeGrContext);
     function("RunGM", &RunGM);
+    function("RunTest", &RunTest);

     class_<GrDirectContext>("GrDirectContext")
         .smart_ptr<sk_sp<GrDirectContext>>("sk_sp<GrDirectContext>");
@@ -1,113 +0,0 @@
-<!DOCTYPE html>
-<title>GMs and unit tests against WASM/WebGL</title>
-<meta charset="utf-8" />
-<meta http-equiv="X-UA-Compatible" content="IE=edge">
-<meta name="viewport" content="width=device-width, initial-scale=1.0">
-<style>
-  html, body {
-    margin: 0;
-    padding: 0;
-  }
-  #debug_canvas {
-    /* Same checkboard pattern as is on debugger.skia.org, just a little darker. */
-    background-position: 0 0, 10px 10px;
-    background-size: 20px 20px;
-    background-image: linear-gradient(45deg, #CCC 25%, transparent 25%, transparent 75%, #CCC 75%, #CCC 100%),
-                      linear-gradient(45deg, #CCC 25%, white 25%, white 75%, #CCC 75%, #CCC 100%);
-  }
-</style>
-
-<canvas id=debug_canvas height=1000 width=1000></canvas>
-
-<canvas id=gm_canvas></canvas>
-
-<script type="text/javascript" src="/out/wasm_gm_tests.js"></script>
-
-<script type="text/javascript" charset="utf-8">
-  const loadTests = InitWasmGMTests({
-    locateFile: (file) => '/out/'+file,
-  });
-
-  Promise.all([loadTests]).then(([GM]) => {
-    RunGMs(GM);
-  });
-
-  function RunGMs(GM) {
-    const canvas = document.getElementById('gm_canvas');
-    const ctx = GM.GetWebGLContext(canvas, 2);
-    const grcontext = GM.MakeGrContext(ctx);
-    requestAnimationFrame(drawQueuedPNGs);
-
-    const names = GM.ListGMs();
-    names.sort();
-    for (const name of names) {
-      const pngAndHash = GM.RunGM(grcontext, name);
-      if (!pngAndHash) {
-        continue;
-      }
-      drawDebugPNG(pngAndHash.png);
-      // We need to know the digest of the image as well as which gm produced it.
-      // As such, we include both parts in the name.
-      outputPNG(pngAndHash.png, pngAndHash.hash + '_' + name + '.png');
-    }
-
-    grcontext.delete();
-  }
-
-  const msPerGM = 500;
-  let timeSinceLastPNGSwapped = 0;
-  const queuedDebugPNGs = [];
-
-  // This decodes the given PNG and queues it up to be drawn. Because decoding the image
-  // (createImageBitmap) is asynchronous, we queue this in a list and have a drawing loop that
-  // occasionally pulls the next image off the queue to be displayed to the human. That way we
-  // have a minimum amount of time an image is seen so the human can casually inspect the outputs
-  // as they are generated.
-  function drawDebugPNG(pngBytes) {
-    const blob = new Blob([pngBytes], {type: 'image/png'});
-    createImageBitmap(blob).then((bitmap) => {
-      queuedDebugPNGs.push(bitmap);
-    });
-  }
-
-  function drawQueuedPNGs() {
-    requestAnimationFrame(drawQueuedPNGs);
-    if (!queuedDebugPNGs.length) {
-      return; // no new image to show
-    }
-    if ((Date.now() - timeSinceLastPNGSwapped) < msPerGM) {
-      return; // not been displayed long enough.
-    }
-    // Draw the first image in the queue.
-    const bitmap = queuedDebugPNGs.shift();
-
-    const debugCanvas = document.getElementById('debug_canvas');
-    debugCanvas.width = bitmap.width;
-    debugCanvas.height = bitmap.height;
-
-    const ctx = debugCanvas.getContext('2d');
-    ctx.clearRect(0, 0, 1000, 1000);
-    ctx.drawImage(bitmap, 0, 0);
-    timeSinceLastPNGSwapped = Date.now();
-  }
-
-  // This triggers a download of the created PNG using the provided filename. For a production
-  // testing environment, it will probably be good to swap this out with a webserver because it
-  // might not be easy to determine where the download folder for a given browser is.
-  function outputPNG(pngBytes, fileName) {
-    // https://stackoverflow.com/a/32094834
-    const blob = new Blob([pngBytes], {type: 'image/png'});
-    const url = window.URL.createObjectURL(blob);
-    const a = document.createElement('a');
-    document.body.appendChild(a);
-    a.href = url;
-    a.download = fileName;
-    a.click();
-    // clean up after because FF might not download it synchronously
-    setTimeout(function() {
-      URL.revokeObjectURL(url);
-      a.remove();
-    }, 50);
-  }
-
-</script>
@@ -2,4 +2,9 @@ run_local:
 	rm -rf /tmp/wasm-gmtests/
 	mkdir /tmp/wasm-gmtests/
 	touch /tmp/wasm-gmtests/empty.txt
-	node run-wasm-gm-tests --js_file ../../out/wasm_gm_tests/wasm_gm_tests.js --wasm_file ../../out/wasm_gm_tests/wasm_gm_tests.wasm --resources ../../resources --known_hashes /tmp/wasm-gmtests/empty.txt --output /tmp/wasm-gmtests/ --use_gpu --timeout 60
+	node run-wasm-gm-tests --js_file ../../out/wasm_gm_tests/wasm_gm_tests.js \
+		--wasm_file ../../out/wasm_gm_tests/wasm_gm_tests.wasm \
+		--resources ../../resources \
+		--known_hashes /tmp/wasm-gmtests/empty.txt \
+		--output /tmp/wasm-gmtests/ \
+		--use_gpu --timeout 600
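For local debugging, the run_local target above is the intended entry point. A rough sketch of the workflow, assuming the wasm_gm_tests build output already exists at the relative paths the target uses:

    # Run from the directory containing this Makefile. It expects
    # ../../out/wasm_gm_tests/wasm_gm_tests.{js,wasm} and ../../resources to exist;
    # PNGs and gold_results.json (written by the puppeteer driver) end up in /tmp/wasm-gmtests/.
    make run_local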
@@ -57,6 +57,11 @@ array of the test names and what they drew.
 document.getElementById('start_tests').addEventListener('click', async () => {
   window._testsProgress = 0;
   window._log = 'Starting\n';
+  window._failed = [];
+  await RunTests(GM);
+  if (window._error) {
+    return;
+  }
   await RunGMs(GM);
   if (attemptedPOSTs !== successfulPOSTs) {
     window._error = `Failed to POST all the PNG files (expected ${attemptedPOSTs}, finished ${successfulPOSTs})`;
@@ -100,7 +105,7 @@ array of the test names and what they drew.
   log('hashes loaded');
 }

-const skip_list = new Set([
+const gmSkipList = new Set([
   // gm names can be added here to skip, if failing.
 ]);

@@ -113,16 +118,16 @@ array of the test names and what they drew.
   const names = GM.ListGMs();
   names.sort();
   for (const name of names) {
-    if (skip_list.has(name)) {
+    if (gmSkipList.has(name)) {
       continue;
     }
-    window._log += `Starting ${name}\n`;
+    log(`Starting GM ${name}`);
     const pngAndMetadata = GM.RunGM(grcontext, name);
     if (!pngAndMetadata || !pngAndMetadata.hash) {
       console.debug('No output for ', name);
       continue; // Was skipped
     }
-    log(`Test ${name} drew ${pngAndMetadata.hash}`);
+    log(` drew ${pngAndMetadata.hash}`);
     window._results.push({
       name: name,
       digest: pngAndMetadata.hash,
@@ -154,6 +159,228 @@ array of the test names and what they drew.
     console.error('Could not post PNG', e);
   });
 }
+
+const testSkipList = new Set([
+  // These tests all crash https://bugs.chromium.org/p/skia/issues/detail?id=10869
+  'AsyncReadPixelsContextShutdown',
+  'BulkFillRectTest',
+  'BulkTextureRectTest',
+  'CharacterizationFBO0nessTest',
+  'ClearOp',
+  'CompressedBackendAllocationTest',
+  'CopySurface',
+  'DDLCompatibilityTest',
+  'DDLCreateCharacterizationFailures',
+  'DDLInvalidRecorder',
+  'DDLMakeRenderTargetTest',
+  'DDLMultipleDDLs',
+  'DDLNonTextureabilityTest',
+  'DDLOperatorEqTest',
+  'DDLSkSurfaceFlush',
+  'DDLSurfaceCharacterizationTest',
+  'DDLTextureFlagsTest',
+  'DDLWrapBackendTest',
+  'Data',
+  'DeferredProxyTest',
+  'DefferredProxyConversionTest',
+  'ES2BlendWithNoTexture',
+  'ExtendedSkColorTypeTests_gpu',
+  'FlushFinishedProcTest',
+  'FlushSubmittedProcTest',
+  'FullScreenClearWithLayers',
+  'GLTextureParameters',
+  'GPUMemorySize',
+  'GrClipStack_SWMask',
+  'GrContextFactory_abandon',
+  'GrContextFactory_executorAndTaskGroup',
+  'GrContextFactory_sharedContexts',
+  'GrContextDump',
+  'GrContext_abandonContext',
+  'GrContext_colorTypeSupportedAsSurface',
+  'GrContext_maxSurfaceSamplesForColorType',
+  'GrContext_oomed',
+  'GrDDLImage_MakeSubset',
+  'GrDefaultPathRendererTest',
+  'GrDrawCollapsedPath',
+  'GrMeshTest',
+  'GrPipelineDynamicStateTest',
+  'GrPathKeys',
+  'GrStyledShape',
+  'GrSurface',
+  'GrSurfaceRenderability',
+  'GrTestingBackendTextureUploadTest',
+  'GrTextBlobScaleAnimation',
+  'GrUploadPixelsTests',
+  'HalfFloatAlphaTextureTest',
+  'HalfFloatRGBATextureTest',
+  'ImageAsyncReadPixels',
+  'ImageFilterBlurLargeImage_Gpu',
+  'ImageFilterCache_GPUBacked',
+  'ImageFilterCache_ImageBackedGPU',
+  'ImageFilterHugeBlur_Gpu',
+  'ImageIsOpaqueTest_Gpu',
+  'ImageNewShader_GPU',
+  'InitialTextureClear',
+  'MatrixColorFilter_TransparentBlack',
+  'OnFlushCallbackTest',
+  'OverbudgetFlush',
+  'OverdrawSurface_Gpu',
+  'PinnedImageTest',
+  'PreinstantiatedProxyConversionTest',
+  'PremulAlphaRoundTrip_Gpu',
+  'ProcessorCloneTest',
+  'ProcessorOptimizationValidationTest',
+  'ProcessorRefTest',
+  'Programs',
+  'PromiseImageNullFulfill',
+  'PromiseImageTest',
+  'PromiseImageTextureFullCache',
+  'PromiseImageTextureShutdown',
+  'ProxyRefTest',
+  'RGB565TextureTest',
+  'RGBA4444TextureTest',
+  'ReadOnlyTexture',
+  'ReadPixels_Texture',
+  'ReadWriteAlpha',
+  'RectangleTexture',
+  'RefCnt',
+  'RenderTargetContextTest',
+  'ReplaceSurfaceBackendTexture',
+  'ResourceAllocatorCurOpsTaskIndexTest',
+  'ResourceAllocatorOverBudgetTest',
+  'ResourceAllocatorStressTest',
+  'ResourceAllocatorTest',
+  'ResourceCacheCache',
+  'ResourceCacheStencilBuffers',
+  'ResourceMessagesAfterAbandon',
+  'SRGBReadWritePixels',
+  'SkRemoteGlyphCache_CacheMissReporting',
+  'SkRemoteGlyphCache_DrawTextAsDFT',
+  'SkRemoteGlyphCache_DrawTextAsPath',
+  'SkRemoteGlyphCache_DrawTextXY',
+  'SkRemoteGlyphCache_StrikeSerialization',
+  'SkRemoteGlyphCache_TypefaceWithNoPaths',
+  'SkRuntimeEffectThreaded',
+  'SkSLCross',
+  'SkScalerCacheMultiThread',
+  'SkTraceMemoryDump_ownedGLBuffer',
+  'SkTraceMemoryDump_ownedGLTexture',
+  'SkTraceMemoryDump_unownedGLRenderTarget',
+  'SkTraceMemoryDump_unownedGLTexture',
+  'SmallBoxBlurBug',
+  'SpecialImage_GPUDevice',
+  'SpecialImage_Gpu',
+  'SpecialSurface_Gpu1',
+  'SrcSrcOverBatchTest',
+  'Stream',
+  'StreamBuffer',
+  'StreamPeek',
+  'String_Threaded',
+  'SurfaceAttachStencil_Gpu',
+  'SurfaceBackendHandleAccessIDs_Gpu',
+  'SurfaceBackendSurfaceAccessCopyOnWrite_Gpu',
+  'SurfaceBudget',
+  'SurfaceCRBug263329_Gpu',
+  'SurfaceCanvasPeek_Gpu',
+  'SurfaceCopyOnWrite_Gpu',
+  'SurfaceNoCanvas_Gpu',
+  'SurfacePartialDraw_Gpu',
+  'SurfacepeekTexture_Gpu',
+  'SurfaceWrappedWithRelease_Gpu',
+  'SurfaceWriteableAfterSnapshotRelease_Gpu',
+  'TextBlobJaggedGlyph',
+  'TextBlobSmoothScroll',
+  'TextBlobStressAbnormal',
+  'TextBlobStressCache',
+  'TextureIdleProcCacheManipulationTest',
+  'TextureIdleProcFlushTest',
+  'TextureIdleProcRerefTest',
+  'TextureIdleProcTest',
+  'TextureIdleStateTest',
+  'TextureProxyTest',
+  'TextureStripAtlasManagerColorFilterTest',
+  'TextureStripAtlasManagerGradientTest',
+  'TriangulatingPathRendererTests',
+  'VertexAttributeCount',
+  'WrappedProxyTest',
+  'WritePixelsNonTextureMSAA_Gpu',
+  'WritePixelsNonTexture_Gpu',
+  'WritePixelsPendingIO',
+  'XfermodeImageFilterCroppedInput_Gpu',
+  'ZeroSizedProxyTest',
+  'skbug5221_GPU',
+
+  // These tests are failing
+  'ApplyGamma',
+  'BlurMaskBiggerThanDest',
+  'Codec_GifPreMap',
+  'Codec_AnimatedTransparentGif',
+  'FILEStreamWithOffset',
+  'Gif',
+  'RepeatedClippedBlurTest',
+  'SkTraceMemoryDump_ownedGLRenderTarget',
+  'SurfaceClear_Gpu',
+  'SurfaceSnapshotAlphaType_Gpu',
+  'TestGpuAllContexts',
+  'TestGpuRenderingContexts',
+  'TestMockContext',
+  'TextBlobAbnormal',
+  'TextBlobCache'
+]);
+
+async function RunTests(GM) {
+  const canvas = document.getElementById('gm_canvas');
+  const ctx = GM.GetWebGLContext(canvas, 2);
+  // This sets up the GL context for all tests.
+  const grcontext = GM.MakeGrContext(ctx);
+  if (!grcontext) {
+    window._error = 'Could not make GrContext for tests';
+    return;
+  }
+  // We run these tests in batchs so as not to lock up the main thread, which makes it easier
+  // to read the progress as well as making the page more responsive when debugging.
+  await new Promise((resolve, reject) => {
+    const names = GM.ListTests();
+    names.sort();
+    console.log(names);
+    let testIdx = -1;
+    const nextBatch = () => {
+      for (let i = 0; i < 10 && testIdx < names.length; i++) {
+        testIdx++;
+        const name = names[testIdx];
+        if (!name) {
+          testIdx = names.length;
+          break;
+        }
+        if (testSkipList.has(name)) {
+          continue;
+        }
+        log(`Running test ${name}`);
+        try {
+          const result = GM.RunTest(name);
+          log(' ' + result.result, result.msg || '');
+          if (result.result !== 'passed' && result.result !== 'skipped') {
+            window._failed.push(name);
+          }
+        } catch (e) {
+          log(`${name} crashed with ${e.toString()} ${e.stack}`);
+          window._error = e.toString();
+          reject();
+          return;
+        }
+        window._testsProgress++;
+      }
+      if (testIdx >= names.length) {
+        resolve();
+        return;
+      }
+      setTimeout(nextBatch);
+    };
+    setTimeout(nextBatch);
+  });
+
+  grcontext.delete();
+}
 </script>
 </body>
 </html>
@@ -223,6 +223,7 @@ async function driveBrowser() {
     process.exit(1);
   }
   console.log("Loading " + targetURL);
+  let failed = [];
  try {
    await page.goto(targetURL, {
      timeout: 60000,
@@ -238,7 +239,9 @@ async function driveBrowser() {

  const err = await page.evaluate('window._error');
  if (err) {
-    console.log(`ERROR: ${err}`);
+    const log = await page.evaluate('window._log');
+    console.info(log);
+    console.error(`ERROR: ${err}`);
    process.exit(1);
  }

@@ -250,7 +253,7 @@ async function driveBrowser() {
  let batch = batchSize;
  while (true) {
    console.log(`Waiting ${options.timeout}s for ${batchSize} tests to complete`);
-    await page.waitForFunction(`(window._testsProgress >= ${batch}) || (window._testsDone === true) || window._error`, {
+    await page.waitForFunction(`(window._testsProgress >= ${batch}) || window._testsDone || window._error`, {
      timeout: options.timeout*1000,
    });
    const progress = await page.evaluate(() => {
@@ -261,7 +264,9 @@ async function driveBrowser() {
      };
    });
    if (progress.err) {
-      console.log(`ERROR: ${progress.err}`);
+      const log = await page.evaluate('window._log');
+      console.info(log);
+      console.error(`ERROR: ${progress.err}`);
      process.exit(1);
    }
    if (progress.done) {
@@ -271,9 +276,12 @@ async function driveBrowser() {
    console.log(`In Progress; completed ${progress.count} tests.`)
    batch = progress.count + batchSize;
  }

  const goldResults = await page.evaluate('window._results');
  console.debug(goldResults);
+  failed = await(page.evaluate('window._failed'));
+  const log = await page.evaluate('window._log');
+  console.info(log);

  const jsonFile = path.join(options.output, 'gold_results.json');
  fs.writeFileSync(jsonFile, JSON.stringify(goldResults));
@@ -290,6 +298,13 @@ async function driveBrowser() {

  await browser.close();
  await new Promise((resolve) => server.close(resolve));
+
+  if (failed.length > 0) {
+    console.error('Failed tests', failed);
+    process.exit(1);
+  } else {
+    process.exit(0);
+  }
 }

 driveBrowser();