[canvaskit] Clean up readPixels API on Canvas and Image

This makes both APIs have the same arguments with the two
source coordinates first and all the destination params
(image info, optional buffer, optional rowBytes) after.
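
Concretely, both methods now follow

  readPixels(srcX, srcY, imageInfo, dest?, bytesPerRow?)

as declared in the updated TypeScript definitions, instead of Image taking
imageInfo first and Canvas taking x/y/w/h plus individual color parameters.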

Bug: skia:10717
Change-Id: I483e4f33f24e226793db6113d5ba5b1955cd892e
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/332622
Reviewed-by: Mike Reed <reed@google.com>
Kevin Lubick 2020-11-06 13:05:37 -05:00
parent 15b95d6f6c
commit b8123cc877
7 changed files with 172 additions and 157 deletions


@ -17,6 +17,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Breaking
- `CanvasKit.MakePathFromSVGString` was renamed to `CanvasKit.Path.MakeFromSVGString`
- `CanvasKit.MakePathFromOp` was renamed to `CanvasKit.Path.MakeFromOp`
- The API for `Canvas.readPixels` and `Image.readPixels` has been reworked to more accurately
reflect the C++ backend and each other: the source coordinates come first, followed by an
ImageInfo object that specifies the output format. Both methods also take an optional malloc'd
destination object and an optional bytesPerRow (required when a destination is supplied). If a
destination is provided, the data is copied into it instead of into a newly allocated buffer.
See the sketch below.
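A minimal sketch of the new calling convention (the canvas handle, dimensions, and buffer
below are illustrative, not part of this change):

  const info = {
    width: 100, height: 100,
    colorType: CanvasKit.ColorType.RGBA_8888,
    alphaType: CanvasKit.AlphaType.Unpremul,
    colorSpace: CanvasKit.ColorSpace.SRGB,
  };
  // No destination: allocates and returns a new Uint8Array (or null on failure).
  const pixels = canvas.readPixels(0, 0, info);
  // Malloc'd destination: pixels are copied into it; bytesPerRow must be supplied.
  const buf = CanvasKit.Malloc(Uint8Array, 4 * 100 * 100);
  canvas.readPixels(0, 0, info, buf, 4 * 100);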
### Changed
- We now compile CanvasKit with emsdk 2.0.6 when testing and deploying to npm.
@ -24,8 +29,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- `CanvasKit.Shader.Blend`, `...Color`, and `...Lerp` have been renamed to
`CanvasKit.Shader.MakeBlend`, `...MakeColor` and `...MakeLerp` to align with naming conventions.
The old names will be removed in an upcoming release.
- `readPixels` now takes a malloc'd object as the last parameter. If provided, the data will be
copied into there instead of allocating a new buffer.
### Removed
- `CanvasKit.MakePathFromCmds`; Was deprecated in favor of `CanvasKit.Path.MakeFromCmds`.


@ -138,12 +138,21 @@ function canvasTests(CK: CanvasKit, canvas?: Canvas, paint?: Paint, path?: Path,
const matrThree = canvas.getTotalMatrix(); // $ExpectType number[]
const surface = canvas.makeSurface(imageInfo); // $ExpectType Surface | null
canvas.markCTM('more ctm');
const pixels = canvas.readPixels(0, 1, 2, 3); // $ExpectType Uint8Array
const pixelsTwo = canvas.readPixels(4, 5, 6, 7, CK.AlphaType.Opaque, CK.ColorType.RGBA_1010102,
CK.ColorSpace.DISPLAY_P3, 16);
const m = CK.Malloc(Uint8Array, 20);
canvas.readPixels(4, 5, 6, 7, CK.AlphaType.Opaque, CK.ColorType.RGBA_1010102,
CK.ColorSpace.DISPLAY_P3, 16, m);
const pixels = canvas.readPixels(85, 1000, {// $ExpectType Uint8Array | Float32Array | null
width: 79,
height: 205,
colorType: CK.ColorType.RGBA_8888,
alphaType: CK.AlphaType.Unpremul,
colorSpace: CK.ColorSpace.SRGB,
});
const m = CK.Malloc(Uint8Array, 10);
canvas.readPixels(85, 1000, {
width: 79,
height: 205,
colorType: CK.ColorType.RGBA_8888,
alphaType: CK.AlphaType.Unpremul,
colorSpace: CK.ColorSpace.SRGB,
}, m, 4 * 85);
canvas.restore();
canvas.restoreToCount(2);
canvas.rotate(1, 2, 3);
@ -235,21 +244,21 @@ function imageTests(CK: CanvasKit, imgElement?: HTMLImageElement) {
const h = img.height();
const w = img.width();
const shader = img.makeShader(CK.TileMode.Decal, CK.TileMode.Repeat); // $ExpectType Shader
const pixels = img.readPixels({
const pixels = img.readPixels(85, 1000, {// $ExpectType Uint8Array | Float32Array | null
width: 79,
height: 205,
colorType: CK.ColorType.RGBA_8888,
alphaType: CK.AlphaType.Unpremul,
colorSpace: CK.ColorSpace.SRGB,
}, 85, 1000);
});
const m = CK.Malloc(Uint8Array, 10);
img.readPixels({
img.readPixels(85, 1000, {
width: 79,
height: 205,
colorType: CK.ColorType.RGBA_8888,
alphaType: CK.AlphaType.Unpremul,
colorSpace: CK.ColorSpace.SRGB,
}, 85, 1000, m);
}, m, 4 * 85);
img.delete();
}


@ -1232,22 +1232,27 @@ export interface Canvas extends EmbindObject<Canvas> {
markCTM(marker: string): void;
/**
* Copies the given rectangle of pixels into a new Uint8Array and returns it. If alphaType,
* colorType, and colorSpace are provided, those will describe the output format.
* @param x
* @param y
* @param w
* @param h
* @param alphaType - defaults to Unpremul
* @param colorType - defaults to RGBA_8888
* @param colorSpace - defaults to SRGB
* @param dest - If provided, the pixels will be copied into the allocated buffer allowing access to the
* pixels without allocating a new TypedArray.
* @param dstRowBytes
Returns a TypedArray containing the pixels read starting at (srcX, srcY); the read does not
exceed the size indicated by imageInfo. See SkCanvas.h for more on the caveats.
*
If dest is not provided, memory equal to the given height * the given bytesPerRow is
allocated to hold the returned pixel data.
*
* This is generally a very expensive call for the GPU backend.
*
* @param srcX
* @param srcY
* @param imageInfo - describes the destination format of the pixels.
* @param dest - If provided, the pixels will be copied into the allocated buffer allowing
* access to the pixels without allocating a new TypedArray.
* @param bytesPerRow - number of bytes per row. Must be provided if dest is set. This
* depends on destination ColorType. For example, it must be at least 4 * width for
* the 8888 color type.
@returns a TypedArray appropriate for the specified ColorType. Note that JS has no 16 bit
float type, so the F16 color type is returned as raw bytes in a Uint8Array.
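*
* An illustrative call (the coordinates and dimensions here are hypothetical):
*
*   canvas.readPixels(0, 0, {
*     width: 100, height: 100,
*     colorType: CanvasKit.ColorType.RGBA_8888,
*     alphaType: CanvasKit.AlphaType.Unpremul,
*     colorSpace: CanvasKit.ColorSpace.SRGB,
*   });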
*/
readPixels(x: number, y: number, w: number, h: number, alphaType?: AlphaType,
colorType?: ColorType, colorSpace?: ColorSpace, dstRowBytes?: number,
dest?: MallocObj): Uint8Array;
readPixels(srcX: number, srcY: number, imageInfo: ImageInfo, dest?: MallocObj,
bytesPerRow?: number): Uint8Array | Float32Array | null;
/**
* Removes changes to the current matrix and clip since Canvas state was
@ -1564,18 +1569,24 @@ export interface Image extends EmbindObject<Image> {
/**
Returns a TypedArray containing the pixels read starting at (srcX, srcY); the read does not
* exceed the size indicated by imageInfo. See Image.h for more on the caveats.
* exceed the size indicated by imageInfo. See SkImage.h for more on the caveats.
*
If dest is not provided, memory equal to the given height * the given bytesPerRow is
allocated to hold the returned pixel data.
*
* @param imageInfo - describes the destination format of the pixels.
* @param srcX
* @param srcY
* @param dest - If provided, the pixels will be copied into the allocated buffer allowing access to the
* pixels without allocating a new TypedArray.
* @returns a Uint8Array if RGB_8888 was requested, Float32Array if RGBA_F32 was requested. null will be returned
* on any error.
*
* @param imageInfo - describes the destination format of the pixels.
* @param dest - If provided, the pixels will be copied into the allocated buffer allowing
* access to the pixels without allocating a new TypedArray.
* @param bytesPerRow - number of bytes per row. Must be provided if dest is set. This
* depends on destination ColorType. For example, it must be at least 4 * width for
* the 8888 color type.
@returns a TypedArray appropriate for the specified ColorType. Note that JS has no 16 bit
float type, so the F16 color type is returned as raw bytes in a Uint8Array.
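*
* An illustrative call using a pre-allocated destination (names and sizes are hypothetical):
*
*   const dst = CanvasKit.Malloc(Uint8Array, 4 * 100 * 100);
*   img.readPixels(0, 0, {
*     width: 100, height: 100,
*     colorType: CanvasKit.ColorType.RGBA_8888,
*     alphaType: CanvasKit.AlphaType.Unpremul,
*     colorSpace: CanvasKit.ColorSpace.SRGB,
*   }, dst, 4 * 100);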
*/
readPixels(imageInfo: ImageInfo, srcX: number, srcY: number, dest?: MallocObj): Uint8Array | Float32Array | null;
readPixels(srcX: number, srcY: number, imageInfo: ImageInfo, dest?: MallocObj,
bytesPerRow?: number): Uint8Array | Float32Array | null;
/**
* Return the width in pixels of the image.


@ -48,7 +48,7 @@ function CanvasRenderingContext2D(skcanvas) {
});
// Don't delete this._canvas as it will be disposed
// by the surface of which it is based.
}
};
// This always accepts DOMMatrix/SVGMatrix or any other
// object that has properties a,b,c,d,e,f defined.
@ -491,29 +491,29 @@ function CanvasRenderingContext2D(skcanvas) {
this.arc = function(x, y, radius, startAngle, endAngle, ccw) {
arc(this._currentPath, x, y, radius, startAngle, endAngle, ccw);
}
};
this.arcTo = function(x1, y1, x2, y2, radius) {
arcTo(this._currentPath, x1, y1, x2, y2, radius);
}
};
// As per the spec this doesn't begin any paths, it only
// clears out any previous paths.
this.beginPath = function() {
this._currentPath.delete();
this._currentPath = new CanvasKit.Path();
}
};
this.bezierCurveTo = function(cp1x, cp1y, cp2x, cp2y, x, y) {
bezierCurveTo(this._currentPath, cp1x, cp1y, cp2x, cp2y, x, y);
}
};
this.clearRect = function(x, y, width, height) {
this._paint.setStyle(CanvasKit.PaintStyle.Fill);
this._paint.setBlendMode(CanvasKit.BlendMode.Clear);
this._canvas.drawRect(CanvasKit.XYWHRect(x, y, width, height), this._paint);
this._paint.setBlendMode(this._globalCompositeOperation);
}
};
this.clip = function(path, fillRule) {
if (typeof path === 'string') {
@ -535,11 +535,11 @@ function CanvasRenderingContext2D(skcanvas) {
}
this._canvas.clipPath(clip, CanvasKit.ClipOp.Intersect, true);
clip.delete();
}
};
this.closePath = function() {
closePath(this._currentPath);
}
};
this.createImageData = function() {
// either takes in 1 or 2 arguments:
@ -559,7 +559,7 @@ function CanvasRenderingContext2D(skcanvas) {
} else {
throw 'createImageData expects 1 or 2 arguments, got '+arguments.length;
}
}
};
this.createLinearGradient = function(x1, y1, x2, y2) {
if (!allAreFinite(arguments)) {
@ -568,13 +568,13 @@ function CanvasRenderingContext2D(skcanvas) {
var lcg = new LinearCanvasGradient(x1, y1, x2, y2);
this._toCleanUp.push(lcg);
return lcg;
}
};
this.createPattern = function(image, repetition) {
var cp = new CanvasPattern(image, repetition);
this._toCleanUp.push(cp);
return cp;
}
};
this.createRadialGradient = function(x1, y1, r1, x2, y2, r2) {
if (!allAreFinite(arguments)) {
@ -583,7 +583,7 @@ function CanvasRenderingContext2D(skcanvas) {
var rcg = new RadialCanvasGradient(x1, y1, r1, x2, y2, r2);
this._toCleanUp.push(rcg);
return rcg;
}
};
this._imagePaint = function() {
var iPaint = this._fillPaint();
@ -593,7 +593,7 @@ function CanvasRenderingContext2D(skcanvas) {
iPaint.setFilterQuality(this._imageFilterQuality);
}
return iPaint;
}
};
this.drawImage = function(img) {
// 3 potential sets of arguments
@ -618,13 +618,13 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.drawImageRect(img, srcRect, destRect, iPaint, false);
iPaint.dispose();
}
};
this.ellipse = function(x, y, radiusX, radiusY, rotation,
startAngle, endAngle, ccw) {
ellipse(this._currentPath, x, y, radiusX, radiusY, rotation,
startAngle, endAngle, ccw);
}
};
// A helper to copy the current paint, ready for filling
// This applies the global alpha.
@ -646,9 +646,9 @@ function CanvasRenderingContext2D(skcanvas) {
// here. In any case, we have .dispose() to make _fillPaint behave
// like _strokePaint and _shadowPaint.
this.delete();
}
};
return paint;
}
};
this.fill = function(path, fillRule) {
if (typeof path === 'string') {
@ -681,7 +681,7 @@ function CanvasRenderingContext2D(skcanvas) {
}
this._canvas.drawPath(path, fillPaint);
fillPaint.dispose();
}
};
this.fillRect = function(x, y, width, height) {
var fillPaint = this._fillPaint();
@ -697,7 +697,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.drawRect(CanvasKit.XYWHRect(x, y, width, height), fillPaint);
fillPaint.dispose();
}
};
this.fillText = function(text, x, y, maxWidth) {
// TODO do something with maxWidth, probably involving measure
@ -715,10 +715,16 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.drawTextBlob(blob, x, y, fillPaint);
blob.delete();
fillPaint.dispose();
}
};
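// Reads back unpremultiplied RGBA_8888 pixels in sRGB, matching the output format
// expected from HTMLCanvas's getImageData.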
this.getImageData = function(x, y, w, h) {
var pixels = this._canvas.readPixels(x, y, w, h);
var pixels = this._canvas.readPixels(x, y, {
'width': w,
'height': h,
'colorType': CanvasKit.ColorType.RGBA_8888,
'alphaType': CanvasKit.AlphaType.Unpremul,
'colorSpace': CanvasKit.ColorSpace.SRGB,
});
if (!pixels) {
return null;
}
@ -727,17 +733,17 @@ function CanvasRenderingContext2D(skcanvas) {
return new ImageData(
new Uint8ClampedArray(pixels.buffer),
w, h);
}
};
this.getLineDash = function() {
return this._lineDashList.slice();
}
};
this._mapToLocalCoordinates = function(pts) {
var inverted = CanvasKit.Matrix.invert(this._currentTransform);
CanvasKit.Matrix.mapPoints(inverted, pts);
return pts;
}
};
this.isPointInPath = function(x, y, fillmode) {
var args = arguments;
@ -766,7 +772,7 @@ function CanvasRenderingContext2D(skcanvas) {
CanvasKit.FillType.Winding :
CanvasKit.FillType.EvenOdd);
return path.contains(x, y);
}
};
this.isPointInStroke = function(x, y) {
var args = arguments;
@ -795,22 +801,22 @@ function CanvasRenderingContext2D(skcanvas) {
var retVal = temp.contains(x, y);
temp.delete();
return retVal;
}
};
this.lineTo = function(x, y) {
lineTo(this._currentPath, x, y);
}
};
this.measureText = function(text) {
return {
width: this._font.measureText(text),
// TODO other measurements?
}
}
};
this.moveTo = function(x, y) {
moveTo(this._currentPath, x, y);
}
};
this.putImageData = function(imageData, x, y, dirtyX, dirtyY, dirtyWidth, dirtyHeight) {
if (!allAreFinite([x, y, dirtyX, dirtyY, dirtyWidth, dirtyHeight])) {
@ -859,15 +865,15 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.drawImageRect(img, src, dst, null, false);
this._canvas.restore();
img.delete();
}
};
this.quadraticCurveTo = function(cpx, cpy, x, y) {
quadraticCurveTo(this._currentPath, cpx, cpy, x, y);
}
};
this.rect = function(x, y, width, height) {
rect(this._currentPath, x, y, width, height);
}
};
this.resetTransform = function() {
// Apply the current transform to the path and then reset
@ -877,7 +883,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.concat(inverted);
// This should be identity, modulo floating point drift.
this._currentTransform = this._canvas.getTotalMatrix();
}
};
this.restore = function() {
var newState = this._canvasStateStack.pop();
@ -915,7 +921,7 @@ function CanvasRenderingContext2D(skcanvas) {
// restores the clip and ctm
this._canvas.restore();
this._currentTransform = this._canvas.getTotalMatrix();
}
};
this.rotate = function(radians) {
if (!isFinite(radians)) {
@ -927,7 +933,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._currentPath.transform(inverted);
this._canvas.rotate(radiansToDegrees(radians), 0, 0);
this._currentTransform = this._canvas.getTotalMatrix();
}
};
this.save = function() {
if (this._fillStyle._copy) {
@ -965,7 +971,7 @@ function CanvasRenderingContext2D(skcanvas) {
});
// Saves the clip
this._canvas.save();
}
};
this.scale = function(sx, sy) {
if (!allAreFinite(arguments)) {
@ -977,7 +983,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._currentPath.transform(inverted);
this._canvas.scale(sx, sy);
this._currentTransform = this._canvas.getTotalMatrix();
}
};
this.setLineDash = function(dashes) {
for (var i = 0; i < dashes.length; i++) {
@ -992,7 +998,7 @@ function CanvasRenderingContext2D(skcanvas) {
Array.prototype.push.apply(dashes, dashes);
}
this._lineDashList = dashes;
}
};
this.setTransform = function(a, b, c, d, e, f) {
if (!(allAreFinite(arguments))) {
@ -1000,7 +1006,7 @@ function CanvasRenderingContext2D(skcanvas) {
}
this.resetTransform();
this.transform(a, b, c, d, e, f);
}
};
// We need to apply the shadowOffsets on the device coordinates, so we undo
// the CTM, apply the offsets, then re-apply the CTM.
@ -1009,7 +1015,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.concat(inverted);
this._canvas.concat(CanvasKit.Matrix.translated(this._shadowOffsetX, this._shadowOffsetY));
this._canvas.concat(this._currentTransform);
}
};
// Returns the shadow paint for the current settings or null if there
// should be no shadow. This ends up being a copy of the given
@ -1040,7 +1046,7 @@ function CanvasRenderingContext2D(skcanvas) {
this.delete();
};
return shadowPaint;
}
};
// A helper to get a copy of the current paint, ready for stroking.
// This applies the global alpha and the dashedness.
@ -1067,9 +1073,9 @@ function CanvasRenderingContext2D(skcanvas) {
paint.dispose = function() {
dashedEffect && dashedEffect.delete();
this.delete();
}
};
return paint;
}
};
this.stroke = function(path) {
path = path ? path._getPath() : this._currentPath;
@ -1086,7 +1092,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.drawPath(path, strokePaint);
strokePaint.dispose();
}
};
this.strokeRect = function(x, y, width, height) {
var strokePaint = this._strokePaint();
@ -1101,7 +1107,7 @@ function CanvasRenderingContext2D(skcanvas) {
}
this._canvas.drawRect(CanvasKit.XYWHRect(x, y, width, height), strokePaint);
strokePaint.dispose();
}
};
this.strokeText = function(text, x, y, maxWidth) {
// TODO do something with maxWidth, probably involving measure
@ -1119,7 +1125,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._canvas.drawTextBlob(blob, x, y, strokePaint);
blob.delete();
strokePaint.dispose();
}
};
this.translate = function(dx, dy) {
if (!allAreFinite(arguments)) {
@ -1131,7 +1137,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._currentPath.transform(inverted);
this._canvas.translate(dx, dy);
this._currentTransform = this._canvas.getTotalMatrix();
}
};
this.transform = function(a, b, c, d, e, f) {
var newTransform = [a, c, e,
@ -1143,7 +1149,7 @@ function CanvasRenderingContext2D(skcanvas) {
this._currentPath.transform(inverted);
this._canvas.concat(newTransform);
this._currentTransform = this._canvas.getTotalMatrix();
}
};
// Not supported operations (e.g. for Web only)
this.addHitRegion = function() {};


@ -920,22 +920,17 @@ CanvasKit.onRuntimeInitialized = function() {
return this._makeShader(xTileMode, yTileMode, localMatrixPtr);
};
CanvasKit.Image.prototype.readPixels = function(imageInfo, srcX, srcY, destMallocObj) {
var rowBytes;
// Important to use ['string'] notation here, otherwise the closure compiler will
// minify away the colorType.
switch (imageInfo['colorType']) {
case CanvasKit.ColorType.RGBA_8888:
rowBytes = imageInfo.width * 4; // 1 byte per channel == 4 bytes per pixel in 8888
break;
case CanvasKit.ColorType.RGBA_F32:
rowBytes = imageInfo.width * 16; // 4 bytes per channel == 16 bytes per pixel in F32
break;
default:
Debug('Colortype not yet supported');
return;
function readPixels(source, srcX, srcY, imageInfo, destMallocObj, bytesPerRow) {
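// If the caller did not supply bytesPerRow, assume tightly packed rows:
// 4 bytes per pixel for 8888, 8 for F16, 16 for F32.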
if (!bytesPerRow) {
bytesPerRow = 4 * imageInfo['width'];
if (imageInfo['colorType'] === CanvasKit.ColorType.RGBA_F16) {
bytesPerRow *= 2;
} else if (imageInfo['colorType'] === CanvasKit.ColorType.RGBA_F32) {
bytesPerRow *= 4;
}
bytesPerRow *= 4;
}
}
var pBytes = rowBytes * imageInfo.height;
var pBytes = bytesPerRow * imageInfo.height;
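// Read into the caller's malloc'd buffer if one was provided; otherwise allocate
// pBytes of scratch memory on the WASM heap.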
var pPtr;
if (destMallocObj) {
pPtr = destMallocObj['byteOffset'];
@ -943,7 +938,7 @@ CanvasKit.onRuntimeInitialized = function() {
pPtr = CanvasKit._malloc(pBytes);
}
if (!this._readPixels(imageInfo, pPtr, rowBytes, srcX, srcY)) {
if (!source._readPixels(imageInfo, pPtr, bytesPerRow, srcX, srcY)) {
Debug('Could not read pixels with the given inputs');
if (!destMallocObj) {
CanvasKit._free(pPtr);
@ -961,16 +956,25 @@ CanvasKit.onRuntimeInitialized = function() {
var retVal = null;
switch (imageInfo['colorType']) {
case CanvasKit.ColorType.RGBA_8888:
case CanvasKit.ColorType.RGBA_F16: // there is no half-float JS type, so we return raw bytes.
retVal = new Uint8Array(CanvasKit.HEAPU8.buffer, pPtr, pBytes).slice();
break;
case CanvasKit.ColorType.RGBA_F32:
retVal = new Float32Array(CanvasKit.HEAPU8.buffer, pPtr, pBytes).slice();
break;
default:
Debug('ColorType not yet supported');
if (!destMallocObj) {
CanvasKit._free(pPtr);
}
return null;
}
// Free the allocated pixels in the WASM memory
CanvasKit._free(pPtr);
return retVal;
}
CanvasKit.Image.prototype.readPixels = function(srcX, srcY, imageInfo, destMallocObj,
bytesPerRow) {
return readPixels(this, srcX, srcY, imageInfo, destMallocObj, bytesPerRow);
};
// Accepts an array of four numbers in the range of 0-1 representing a 4f color
@ -1176,52 +1180,9 @@ CanvasKit.onRuntimeInitialized = function() {
return rv;
};
// TODO(kjlubick) align this API with Image.readPixels
CanvasKit.Canvas.prototype.readPixels = function(x, y, w, h, alphaType,
colorType, colorSpace, dstRowBytes,
destMallocObj) {
// supply defaults (which are compatible with HTMLCanvas's getImageData)
alphaType = alphaType || CanvasKit.AlphaType.Unpremul;
colorType = colorType || CanvasKit.ColorType.RGBA_8888;
colorSpace = colorSpace || CanvasKit.ColorSpace.SRGB;
var pixBytes = 4;
if (colorType === CanvasKit.ColorType.RGBA_F16) {
pixBytes = 8;
}
dstRowBytes = dstRowBytes || (pixBytes * w);
var len = h * dstRowBytes;
var pPtr;
if (destMallocObj) {
pPtr = destMallocObj['byteOffset'];
} else {
pPtr = CanvasKit._malloc(len);
}
var ok = this._readPixels({
'width': w,
'height': h,
'colorType': colorType,
'alphaType': alphaType,
'colorSpace': colorSpace,
}, pPtr, dstRowBytes, x, y);
if (!ok) {
if (!destMallocObj) {
CanvasKit._free(pPtr);
}
return null;
}
// If the user provided us a buffer to copy into, we don't need to allocate a new TypedArray.
if (destMallocObj) {
return destMallocObj['toTypedArray'](); // Return the typed array wrapper w/o allocating.
}
// The first typed array is just a view into memory. Because we will
// be free-ing that, we call slice to make a persistent copy.
var pixels = new Uint8Array(CanvasKit.HEAPU8.buffer, pPtr, len).slice();
CanvasKit._free(pPtr);
return pixels;
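// Canvas.readPixels now shares the helper above with Image.readPixels, so both
// methods take the same arguments: source coordinates first, then imageInfo,
// an optional malloc'd destination, and an optional bytesPerRow.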
CanvasKit.Canvas.prototype.readPixels = function(srcX, srcY, imageInfo, destMallocObj,
bytesPerRow) {
return readPixels(this, srcX, srcY, imageInfo, destMallocObj, bytesPerRow);
};
CanvasKit.Canvas.prototype.saveLayer = function(paint, boundsRect, backdrop, flags) {
@ -1372,7 +1333,7 @@ CanvasKit.onRuntimeInitialized = function() {
CanvasKit.Shader.Lerp = CanvasKit.Shader.MakeLerp;
CanvasKit.Shader.MakeLinearGradient = function(start, end, colors, pos, mode, localMatrix, flags, colorSpace) {
colorSpace = colorSpace || null
colorSpace = colorSpace || null;
var cPtrInfo = copyFlexibleColorArray(colors);
var posPtr = copy1dArray(pos, 'HEAPF32');
flags = flags || 0;


@ -146,8 +146,9 @@ describe('Core canvas behavior', () => {
width: img.width(),
height: img.height(),
};
const rowBytes = 4 * img.width();
const pixels = img.readPixels(imageInfo, 0, 0);
const pixels = img.readPixels(0, 0, imageInfo);
// We know the image is 512 by 512 pixels in size, each pixel
// requires 4 bytes (R, G, B, A).
expect(pixels.length).toEqual(512 * 512 * 4);
@ -156,7 +157,7 @@ describe('Core canvas behavior', () => {
const rdsData = CanvasKit.Malloc(Uint8Array, 512 * 5*512 * 4);
const pixels2 = rdsData.toTypedArray();
pixels2[0] = 127; // sentinel value, should be overwritten by readPixels.
img.readPixels(imageInfo, 0, 0, rdsData);
img.readPixels(0, 0, imageInfo, rdsData, rowBytes);
expect(rdsData.toTypedArray()[0]).toEqual(pixels[0]);
img.delete();
@ -900,8 +901,13 @@ describe('Core canvas behavior', () => {
mObj.toTypedArray()[0] = 127; // sentinel value. Should be overwritten by readPixels.
const canvas = surface.getCanvas();
canvas.clear(CanvasKit.TRANSPARENT);
const pixels = canvas.readPixels(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT,
CanvasKit.AlphaType.Unpremul, CanvasKit.ColorType.RGBA_8888, colorSpace, null, mObj);
const pixels = canvas.readPixels(0, 0, {
width: CANVAS_WIDTH,
height: CANVAS_HEIGHT,
colorType: CanvasKit.ColorType.RGBA_8888,
alphaType: CanvasKit.AlphaType.Unpremul,
colorSpace: colorSpace
}, mObj, 4 * CANVAS_WIDTH);
expect(pixels).toBeTruthy('Could not read pixels from surface');
expect(pixels[0] !== 127).toBeTruthy();
expect(pixels[0]).toEqual(mObj.toTypedArray()[0]);
@ -922,8 +928,13 @@ describe('Core canvas behavior', () => {
expect(CanvasKit.ColorSpace.Equals(info.colorSpace, colorSpace))
.toBeTruthy("Surface not created with correct color space.");
const pixels = surface.getCanvas().readPixels(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT,
CanvasKit.AlphaType.Unpremul, CanvasKit.ColorType.RGBA_F16, colorSpace);
const pixels = surface.getCanvas().readPixels(0, 0, {
width: CANVAS_WIDTH,
height: CANVAS_HEIGHT,
colorType: CanvasKit.ColorType.RGBA_F16,
alphaType: CanvasKit.AlphaType.Unpremul,
colorSpace: colorSpace
});
expect(pixels).toBeTruthy('Could not read pixels from surface');
});
it('Can create an Adobe RGB surface', () => {
@ -931,7 +942,7 @@ describe('Core canvas behavior', () => {
const surface = CanvasKit.MakeCanvasSurface('test', CanvasKit.ColorSpace.ADOBE_RGB);
expect(surface).toBeTruthy('Could not make surface');
if (!surface.reportBackendTypeIsGPU()) {
console.log('Not expecting color space support in cpu backed suface.');
console.log('Not expecting color space support in cpu backed surface.');
return;
}
let info = surface.imageInfo();
@ -940,8 +951,13 @@ describe('Core canvas behavior', () => {
expect(CanvasKit.ColorSpace.Equals(info.colorSpace, colorSpace))
.toBeTruthy("Surface not created with correct color space.");
const pixels = surface.getCanvas().readPixels(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT,
CanvasKit.AlphaType.Unpremul, CanvasKit.ColorType.RGBA_F16, colorSpace);
const pixels = surface.getCanvas().readPixels(0, 0, {
width: CANVAS_WIDTH,
height: CANVAS_HEIGHT,
colorType: CanvasKit.ColorType.RGBA_F16,
alphaType: CanvasKit.AlphaType.Unpremul,
colorSpace: colorSpace
});
expect(pixels).toBeTruthy('Could not read pixels from surface');
});


@ -18,7 +18,7 @@ const _commonGM = (it, pause, name, callback, assetsToFetchOrPromisesToWaitOn) =
}
it('draws gm '+name, (done) => {
const surface = CanvasKit.MakeCanvasSurface('test');
expect(surface).toBeTruthy('Could not make surface')
expect(surface).toBeTruthy('Could not make surface');
if (!surface) {
done();
return;
@ -186,7 +186,16 @@ function reportSurface(surface, testname, done) {
// data. So, we copy it out and draw it to a normal canvas to take a picture.
// To be consistent across CPU and GPU, we just do it for all configurations
// (even though the CPU canvas shows up after flush just fine).
let pixels = surface.getCanvas().readPixels(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
let pixels = surface.getCanvas().readPixels(0, 0, {
width: CANVAS_WIDTH,
height: CANVAS_HEIGHT,
colorType: CanvasKit.ColorType.RGBA_8888,
alphaType: CanvasKit.AlphaType.Unpremul,
colorSpace: CanvasKit.ColorSpace.SRGB,
});
if (!pixels) {
throw 'Could not get pixels for test '+testname;
}
pixels = new Uint8ClampedArray(pixels.buffer);
const imageData = new ImageData(pixels, CANVAS_WIDTH, CANVAS_HEIGHT);