From 8edef2bd458795348af6c333c8b39792fa308c07 Mon Sep 17 00:00:00 2001 From: Michael Barry Date: Tue, 10 Oct 2023 07:00:19 -0700 Subject: [PATCH] Fix Raster DEM decoding in safari private browsing mode (#3185) * working * tests * rm force * rm debug * convert promise to callback and some tricks to reduce bundle size * fix test * back to async/await * mangle test * indent * fix mangle test * update changelog * changelog tweak * respond to comments * fixes * changelog * comments * comment * fix test * handle html image decoding in main thread * tweak * bump size * comments * refactor getImage call --- CHANGELOG.md | 3 +- src/source/raster_dem_tile_source.ts | 30 +++- src/source/raster_dem_tile_worker_source.ts | 31 +--- src/util/offscreen_canvas_distorted.test.ts | 13 ++ src/util/offscreen_canvas_distorted.ts | 39 +++++ src/util/util.test.ts | 172 +++++++++++++++++++- src/util/util.ts | 150 +++++++++++++++++ test/build/min.test.ts | 2 +- 8 files changed, 404 insertions(+), 36 deletions(-) create mode 100644 src/util/offscreen_canvas_distorted.test.ts create mode 100644 src/util/offscreen_canvas_distorted.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index 11168eae92..b8c874e03b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,8 @@ ### 🐞 Bug fixes -- Fix setStyle->style.setState didn't reset _serializedLayers ([#3133](https://github.com/maplibre/maplibre-gl-js/pull/3133)). +- Fix setStyle->style.setState didn't reset \_serializedLayers ([#3133](https://github.com/maplibre/maplibre-gl-js/pull/3133)). +- Fix Raster DEM decoding in safari private browsing mode ([#3185](https://github.com/maplibre/maplibre-gl-js/pull/3185)) - _...Add new stuff here..._ ## 3.4.0 diff --git a/src/source/raster_dem_tile_source.ts b/src/source/raster_dem_tile_source.ts index 63b89cd2c7..19d052245e 100644 --- a/src/source/raster_dem_tile_source.ts +++ b/src/source/raster_dem_tile_source.ts @@ -1,6 +1,6 @@ import {ImageRequest} from '../util/image_request'; import {ResourceType} from '../util/request_manager'; -import {extend, isImageBitmap} from '../util/util'; +import {extend, isImageBitmap, readImageUsingVideoFrame} from '../util/util'; import {Evented} from '../util/evented'; import {browser} from '../util/browser'; import {offscreenCanvasSupported} from '../util/offscreen_canvas_supported'; @@ -16,6 +16,8 @@ import type {Tile} from './tile'; import type {Callback} from '../types/callback'; import type {RasterDEMSourceSpecification} from '@maplibre/maplibre-gl-style-spec'; import type {ExpiryData} from '../util/ajax'; +import {isOffscreenCanvasDistorted} from '../util/offscreen_canvas_distorted'; +import {RGBAImage} from '../util/image'; /** * A source containing raster DEM tiles (See the [Style Specification](https://maplibre.org/maplibre-style-spec/) for detailed documentation of options.) 
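For orientation, a minimal sketch of the decode-path selection this patch introduces. It mirrors the readImageNow helper in the next hunk and the getImageData utility added to src/util/util.ts further down; decodeDemPixels is a hypothetical name used only for illustration.

import {browser} from '../util/browser';
import {isOffscreenCanvasDistorted} from '../util/offscreen_canvas_distorted';
import {readImageUsingVideoFrame} from '../util/util';

// Prefer the WebCodecs VideoFrame read when the browser is known to perturb
// OffscreenCanvas getImageData results (e.g. Safari private browsing);
// otherwise, or if the VideoFrame read fails, use the existing canvas decode.
async function decodeDemPixels(img: ImageBitmap | HTMLImageElement): Promise<Uint8ClampedArray | ImageData> {
    if (typeof VideoFrame !== 'undefined' && isOffscreenCanvasDistorted()) {
        try {
            // read with a 1px border so neighboring DEM data can be backfilled later
            return await readImageUsingVideoFrame(img, -1, -1, img.width + 2, img.height + 2);
        } catch {
            // VideoFrame rejected the image; fall through to canvas decoding
        }
    }
    return browser.getImageData(img, 1);
}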
@@ -54,10 +56,9 @@ export class RasterDEMTileSource extends RasterTileSource implements Source {
     loadTile(tile: Tile, callback: Callback<void>) {
         const url = tile.tileID.canonical.url(this.tiles, this.map.getPixelRatio(), this.scheme);
-        tile.request = ImageRequest.getImage(this.map._requestManager.transformRequest(url, ResourceType.Tile), imageLoaded.bind(this), this.map._refreshExpiredTiles);
-
+        const request = this.map._requestManager.transformRequest(url, ResourceType.Tile);
         tile.neighboringTiles = this._getNeighboringTiles(tile.tileID);
-        function imageLoaded(err: Error, img: (HTMLImageElement | ImageBitmap) & ExpiryData) {
+        tile.request = ImageRequest.getImage(request, async (err: Error, img: (HTMLImageElement | ImageBitmap), expiry: ExpiryData) => {
             delete tile.request;
             if (tile.aborted) {
                 tile.state = 'unloaded';
@@ -66,11 +67,9 @@ export class RasterDEMTileSource extends RasterTileSource implements Source {
                 tile.state = 'errored';
                 callback(err);
             } else if (img) {
-                if (this.map._refreshExpiredTiles) tile.setExpiryData(img);
-                delete img.cacheControl;
-                delete img.expires;
+                if (this.map._refreshExpiredTiles) tile.setExpiryData(expiry);
                 const transfer = isImageBitmap(img) && offscreenCanvasSupported();
-                const rawImageData = transfer ? img : browser.getImageData(img, 1);
+                const rawImageData = transfer ? img : await readImageNow(img);
                 const params = {
                     uid: tile.uid,
                     coord: tile.tileID,
@@ -85,9 +84,22 @@ export class RasterDEMTileSource extends RasterTileSource implements Source {
 
                 if (!tile.actor || tile.state === 'expired') {
                     tile.actor = this.dispatcher.getActor();
-                    tile.actor.send('loadDEMTile', params, done.bind(this));
+                    tile.actor.send('loadDEMTile', params, done);
+                }
+            }
+        }, this.map._refreshExpiredTiles);
+
+        async function readImageNow(img: ImageBitmap | HTMLImageElement): Promise<RGBAImage | ImageData> {
+            if (typeof VideoFrame !== 'undefined' && isOffscreenCanvasDistorted()) {
+                const width = img.width + 2;
+                const height = img.height + 2;
+                try {
+                    return new RGBAImage({width, height}, await readImageUsingVideoFrame(img, -1, -1, width, height));
+                } catch (e) {
+                    // fall-back to browser canvas decoding
                 }
             }
+            return browser.getImageData(img, 1);
         }
 
         function done(err, data) {
diff --git a/src/source/raster_dem_tile_worker_source.ts b/src/source/raster_dem_tile_worker_source.ts
index 8b0d58ec60..d283804633 100644
--- a/src/source/raster_dem_tile_worker_source.ts
+++ b/src/source/raster_dem_tile_worker_source.ts
@@ -6,46 +6,29 @@ import type {
     WorkerDEMTileCallback,
     TileParameters
 } from './worker_source';
-import {isImageBitmap} from '../util/util';
+import {getImageData, isImageBitmap} from '../util/util';
 
 export class RasterDEMTileWorkerSource {
     actor: Actor;
     loaded: {[_: string]: DEMData};
-    offscreenCanvas: OffscreenCanvas;
-    offscreenCanvasContext: OffscreenCanvasRenderingContext2D;
 
     constructor() {
         this.loaded = {};
     }
 
-    loadTile(params: WorkerDEMTileParameters, callback: WorkerDEMTileCallback) {
+    async loadTile(params: WorkerDEMTileParameters, callback: WorkerDEMTileCallback) {
         const {uid, encoding, rawImageData, redFactor, greenFactor, blueFactor, baseShift} = params;
-        // Main thread will transfer ImageBitmap if offscreen decode with OffscreenCanvas is supported, else it will transfer an already decoded image.
-        const imagePixels = isImageBitmap(rawImageData) ? this.getImageData(rawImageData) : rawImageData as RGBAImage;
+        const width = rawImageData.width + 2;
+        const height = rawImageData.height + 2;
+        const imagePixels: RGBAImage = isImageBitmap(rawImageData) ?
+            new RGBAImage({width, height}, await getImageData(rawImageData, -1, -1, width, height)) :
+            rawImageData;
         const dem = new DEMData(uid, imagePixels, encoding, redFactor, greenFactor, blueFactor, baseShift);
         this.loaded = this.loaded || {};
         this.loaded[uid] = dem;
         callback(null, dem);
     }
 
-    getImageData(imgBitmap: ImageBitmap): RGBAImage {
-        // Lazily initialize OffscreenCanvas
-        if (!this.offscreenCanvas || !this.offscreenCanvasContext) {
-            // Dem tiles are typically 256x256
-            this.offscreenCanvas = new OffscreenCanvas(imgBitmap.width, imgBitmap.height);
-            this.offscreenCanvasContext = this.offscreenCanvas.getContext('2d', {willReadFrequently: true});
-        }
-
-        this.offscreenCanvas.width = imgBitmap.width;
-        this.offscreenCanvas.height = imgBitmap.height;
-
-        this.offscreenCanvasContext.drawImage(imgBitmap, 0, 0, imgBitmap.width, imgBitmap.height);
-        // Insert an additional 1px padding around the image to allow backfilling for neighboring data.
-        const imgData = this.offscreenCanvasContext.getImageData(-1, -1, imgBitmap.width + 2, imgBitmap.height + 2);
-        this.offscreenCanvasContext.clearRect(0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height);
-        return new RGBAImage({width: imgData.width, height: imgData.height}, imgData.data);
-    }
-
     removeTile(params: TileParameters) {
         const loaded = this.loaded,
             uid = params.uid;
diff --git a/src/util/offscreen_canvas_distorted.test.ts b/src/util/offscreen_canvas_distorted.test.ts
new file mode 100644
index 0000000000..bf5c4bf483
--- /dev/null
+++ b/src/util/offscreen_canvas_distorted.test.ts
@@ -0,0 +1,13 @@
+import {isOffscreenCanvasDistorted} from './offscreen_canvas_distorted';
+import {Canvas} from 'canvas';
+import {offscreenCanvasSupported} from './offscreen_canvas_supported';
+
+test('normal operation does not mangle canvas', () => {
+    const OffscreenCanvas = (window as any).OffscreenCanvas = jest.fn((width: number, height: number) => {
+        return new Canvas(width, height);
+    });
+    expect(offscreenCanvasSupported()).toBeTruthy();
+    OffscreenCanvas.mockClear();
+    expect(isOffscreenCanvasDistorted()).toBeFalsy();
+    expect(OffscreenCanvas).toHaveBeenCalledTimes(1);
+});
diff --git a/src/util/offscreen_canvas_distorted.ts b/src/util/offscreen_canvas_distorted.ts
new file mode 100644
index 0000000000..15475a87e5
--- /dev/null
+++ b/src/util/offscreen_canvas_distorted.ts
@@ -0,0 +1,39 @@
+import {offscreenCanvasSupported} from './offscreen_canvas_supported';
+
+let offscreenCanvasDistorted: boolean;
+
+/**
+ * Some browsers don't return the exact pixels from a canvas to prevent user fingerprinting (see #3185).
+ * This function writes pixels to an OffscreenCanvas and reads them back using getImageData, returning true
+ * if they don't match.
+ *
+ * @returns true if the browser supports OffscreenCanvas but it distorts getImageData results, false otherwise.
+ */
+export function isOffscreenCanvasDistorted(): boolean {
+    if (offscreenCanvasDistorted == null) {
+        offscreenCanvasDistorted = false;
+        if (offscreenCanvasSupported()) {
+            const size = 5;
+            const canvas = new OffscreenCanvas(size, size);
+            const context = canvas.getContext('2d', {willReadFrequently: true});
+            if (context) {
+                // fill each pixel with an RGB value that should make the byte at index i equal to i (except alpha channel):
+                // [0, 1, 2, 255, 4, 5, 6, 255, 8, 9, 10, 255, ...]
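+                // e.g. pixel 3 is painted rgb(12,13,14) at column 3, row 0, so readback bytes 12-14 should
+                // be exactly 12, 13 and 14; any non-alpha byte that differs from its index means the browser
+                // injected fingerprinting noise and the canvas is treated as distorted.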
+ for (let i = 0; i < size * size; i++) { + const base = i * 4; + context.fillStyle = `rgb(${base},${base + 1},${base + 2})`; + context.fillRect(i % size, Math.floor(i / size), 1, 1); + } + const data = context.getImageData(0, 0, size, size).data; + for (let i = 0; i < size * size * 4; i++) { + if (i % 4 !== 3 && data[i] !== i) { + offscreenCanvasDistorted = true; + break; + } + } + } + } + } + + return offscreenCanvasDistorted || false; +} diff --git a/src/util/util.test.ts b/src/util/util.test.ts index bf20895437..47ede250b3 100644 --- a/src/util/util.test.ts +++ b/src/util/util.test.ts @@ -1,5 +1,6 @@ import Point from '@mapbox/point-geometry'; -import {arraysIntersect, asyncAll, bezier, clamp, clone, deepEqual, easeCubicInOut, extend, filterObject, findLineIntersection, isClosedPolygon, isCounterClockwise, isPowerOfTwo, keysDifference, mapObject, nextPowerOfTwo, parseCacheControl, pick, uniqueId, wrap} from './util'; +import {arraysIntersect, asyncAll, bezier, clamp, clone, deepEqual, easeCubicInOut, extend, filterObject, findLineIntersection, isClosedPolygon, isCounterClockwise, isPowerOfTwo, keysDifference, mapObject, nextPowerOfTwo, parseCacheControl, pick, readImageDataUsingOffscreenCanvas, readImageUsingVideoFrame, uniqueId, wrap} from './util'; +import {Canvas} from 'canvas'; describe('util', () => { expect(easeCubicInOut(0)).toBe(0); @@ -346,3 +347,172 @@ describe('util findLineIntersection', () => { expect(intersection).toBeNull(); }); }); + +describe('util readImageUsingVideoFrame', () => { + let format = 'RGBA'; + const frame = { + get format() { + return format; + }, + copyTo: jest.fn(buf => { + buf.set(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]).subarray(0, buf.length)); + return Promise.resolve(); + }), + close: jest.fn(), + }; + (window as any).VideoFrame = jest.fn(() => frame); + const canvas = document.createElement('canvas'); + canvas.width = canvas.height = 2; + + beforeEach(() => { + format = 'RGBA'; + frame.copyTo.mockClear(); + frame.close.mockReset(); + }); + + test('copy RGB', async () => { + format = 'RGBA'; + const result = await readImageUsingVideoFrame(canvas, 0, 0, 2, 2); + expect(result).toHaveLength(4 * 4); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 8}], + rect: {x: 0, y: 0, width: 2, height: 2} + }); + expect(result).toEqual(new Uint8ClampedArray([ + 1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16 + ])); + expect(frame.close).toHaveBeenCalledTimes(1); + }); + + test('flip BRG', async () => { + format = 'BGRX'; + const result = await readImageUsingVideoFrame(canvas, 0, 0, 2, 2); + expect(result).toEqual(new Uint8ClampedArray([ + 3, 2, 1, 4, 7, 6, 5, 8, + 11, 10, 9, 12, 15, 14, 13, 16 + ])); + expect(frame.close).toHaveBeenCalledTimes(1); + }); + + test('ignore bad format', async () => { + format = 'OTHER'; + await expect(readImageUsingVideoFrame(canvas, 0, 0, 2, 2)).rejects.toThrow(); + expect(frame.close).toHaveBeenCalledTimes(1); + }); + + describe('layout/rect', () => { + beforeEach(() => { + (window as any).VideoFrame = jest.fn(() => frame); + canvas.width = canvas.height = 3; + }); + + test('full rectangle', async () => { + await readImageUsingVideoFrame(canvas, 0, 0, 3, 3); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 12}], + rect: {x: 0, y: 0, width: 3, height: 3} + }); + }); + + test('top left', async () => { + await readImageUsingVideoFrame(canvas, 0, 0, 2, 2); + 
expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 8}], + rect: {x: 0, y: 0, width: 2, height: 2} + }); + }); + + test('top right', async () => { + await readImageUsingVideoFrame(canvas, 1, 0, 2, 2); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 8}], + rect: {x: 1, y: 0, width: 2, height: 2} + }); + }); + + test('bottom left', async () => { + await readImageUsingVideoFrame(canvas, 0, 1, 2, 2); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 8}], + rect: {x: 0, y: 1, width: 2, height: 2} + }); + }); + + test('bottom right', async () => { + await readImageUsingVideoFrame(canvas, 1, 1, 2, 2); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 8}], + rect: {x: 1, y: 1, width: 2, height: 2} + }); + }); + + test('middle', async () => { + await readImageUsingVideoFrame(canvas, 1, 1, 1, 1); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 4}], + rect: {x: 1, y: 1, width: 1, height: 1} + }); + }); + + test('extend past on all sides', async () => { + await readImageUsingVideoFrame(canvas, -1, -1, 5, 5); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 4 * 5 + 4, stride: 4 * 5}], + rect: {x: 0, y: 0, width: 3, height: 3} + }); + }); + + test('overhang top left', async () => { + await readImageUsingVideoFrame(canvas, -1, -1, 2, 2); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 4 * 2 + 4, stride: 4 * 2}], + rect: {x: 0, y: 0, width: 1, height: 1} + }); + }); + + test('overhang top right', async () => { + await readImageUsingVideoFrame(canvas, 2, -1, 2, 2); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 4 * 2, stride: 4 * 2}], + rect: {x: 2, y: 0, width: 1, height: 1} + }); + }); + + test('overhang bottom left', async () => { + await readImageUsingVideoFrame(canvas, -1, 2, 2, 2); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 4, stride: 4 * 2}], + rect: {x: 0, y: 2, width: 1, height: 1} + }); + }); + + test('overhang bottom right', async () => { + await readImageUsingVideoFrame(canvas, 2, 2, 2, 2); + expect(frame.copyTo).toHaveBeenCalledWith(expect.anything(), { + layout: [{offset: 0, stride: 4 * 2}], + rect: {x: 2, y: 2, width: 1, height: 1} + }); + }); + }); +}); + +describe('util readImageDataUsingOffscreenCanvas', () => { + test('reads pixels from image', async () => { + (window as any).OffscreenCanvas = Canvas; + const image = new Canvas(2, 2); + const context = image.getContext('2d'); + context.fillStyle = 'rgb(10,0,0)'; + context.fillRect(0, 0, 1, 1); + context.fillStyle = 'rgb(0,20,0)'; + context.fillRect(1, 0, 1, 1); + context.fillStyle = 'rgb(0,0,30)'; + context.fillRect(0, 1, 1, 1); + context.fillStyle = 'rgb(40,40,40)'; + context.fillRect(1, 1, 1, 1); + expect([...await readImageDataUsingOffscreenCanvas(image as any, 0, 0, 2, 2)]).toEqual([ + 10, 0, 0, 255, 0, 20, 0, 255, + 0, 0, 30, 255, 40, 40, 40, 255, + ]); + }); +}); diff --git a/src/util/util.ts b/src/util/util.ts index 01f14383a5..13e0c7db9e 100644 --- a/src/util/util.ts +++ b/src/util/util.ts @@ -1,6 +1,8 @@ import Point from '@mapbox/point-geometry'; import UnitBezier from '@mapbox/unitbezier'; import type {Callback} from '../types/callback'; +import {isOffscreenCanvasDistorted} from './offscreen_canvas_distorted'; +import type {Size} from './image'; /** * 
Given a value `t` that varies between 0 and 1, return @@ -517,3 +519,151 @@ export function arrayBufferToImage(data: ArrayBuffer, callback: (err?: Error | n const blob: Blob = new Blob([new Uint8Array(data)], {type: 'image/png'}); img.src = data.byteLength ? URL.createObjectURL(blob) : transparentPngUrl; } + +/** + * Computes the webcodecs VideoFrame API options to select a rectangle out of + * an image and write it into the destination rectangle. + * + * Rect (x/y/width/height) select the overlapping rectangle from the source image + * and layout (offset/stride) write that overlapping rectangle to the correct place + * in the destination image. + * + * Offset is the byte offset in the dest image that the first pixel appears at + * and stride is the number of bytes to the start of the next row: + * β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” + * β”‚ dest β”‚ + * β”‚ β”Œβ”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β” + * β”‚offsetβ†’β”‚β–“β–“β–“β”‚ sourceβ”‚ + * β”‚ β”‚β–“β–“β–“β”‚ β”‚ + * β”‚ β””β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”˜ + * β”‚stride β‡ β•Œβ•Œβ•Œβ”‚ + * β”‚β•Œβ•Œβ•Œβ•Œβ•Œβ•Œβ†’ β”‚ + * β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + * + * @param image - source image containing a width and height attribute + * @param x - top-left x coordinate to read from the image + * @param y - top-left y coordinate to read from the image + * @param width - width of the rectangle to read from the image + * @param height - height of the rectangle to read from the image + * @returns the layout and rect options to pass into VideoFrame API + */ +function computeVideoFrameParameters(image: Size, x: number, y: number, width: number, height: number): VideoFrameCopyToOptions { + const destRowOffset = Math.max(-x, 0) * 4; + const firstSourceRow = Math.max(0, y); + const firstDestRow = firstSourceRow - y; + const offset = firstDestRow * width * 4 + destRowOffset; + const stride = width * 4; + + const sourceLeft = Math.max(0, x); + const sourceTop = Math.max(0, y); + const sourceRight = Math.min(image.width, x + width); + const sourceBottom = Math.min(image.height, y + height); + return { + rect: { + x: sourceLeft, + y: sourceTop, + width: sourceRight - sourceLeft, + height: sourceBottom - sourceTop + }, + layout: [{offset, stride}] + }; +} + +/** + * Reads pixels from an ImageBitmap/Image/canvas using webcodec VideoFrame API. 
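+ * The requested rectangle may extend past the image: only the overlapping region is copied (via the
+ * rect/layout options computed above) and the rest of the zero-initialized result stays transparent black.
+ * For example, reading a 5x5 rect at (-1,-1) from a 3x3 image copies rect {x: 0, y: 0, width: 3, height: 3}
+ * with layout [{offset: 4 * 5 + 4, stride: 4 * 5}].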
+ *
+ * @param image - image, ImageBitmap, or canvas to read pixels from
+ * @param x - top-left x coordinate to read from the image
+ * @param y - top-left y coordinate to read from the image
+ * @param width - width of the rectangle to read from the image
+ * @param height - height of the rectangle to read from the image
+ * @returns a promise resolving to the parsed RGBA pixel values of the image, or rejecting if an error occurred
+ */
+export async function readImageUsingVideoFrame(
+    image: HTMLImageElement | HTMLCanvasElement | ImageBitmap | OffscreenCanvas,
+    x: number, y: number, width: number, height: number
+): Promise<Uint8ClampedArray> {
+    if (typeof VideoFrame === 'undefined') {
+        throw new Error('VideoFrame not supported');
+    }
+    const frame = new VideoFrame(image, {timestamp: 0});
+    try {
+        const format = frame?.format;
+        if (!format || !(format.startsWith('BGR') || format.startsWith('RGB'))) {
+            throw new Error(`Unrecognized format ${format}`);
+        }
+        const swapBR = format.startsWith('BGR');
+        const result = new Uint8ClampedArray(width * height * 4);
+        await frame.copyTo(result, computeVideoFrameParameters(image, x, y, width, height));
+        if (swapBR) {
+            for (let i = 0; i < result.length; i += 4) {
+                const tmp = result[i];
+                result[i] = result[i + 2];
+                result[i + 2] = tmp;
+            }
+        }
+        return result;
+    } finally {
+        frame.close();
+    }
+}
+
+let offscreenCanvas: OffscreenCanvas;
+let offscreenCanvasContext: OffscreenCanvasRenderingContext2D;
+
+/**
+ * Reads pixels from an ImageBitmap/Image/canvas using OffscreenCanvas.
+ *
+ * @param imgBitmap - image, ImageBitmap, or canvas to read pixels from
+ * @param x - top-left x coordinate to read from the image
+ * @param y - top-left y coordinate to read from the image
+ * @param width - width of the rectangle to read from the image
+ * @param height - height of the rectangle to read from the image
+ * @returns the parsed RGBA pixel values of the image
+ */
+export function readImageDataUsingOffscreenCanvas(
+    imgBitmap: HTMLImageElement | HTMLCanvasElement | ImageBitmap | OffscreenCanvas,
+    x: number, y: number, width: number, height: number
+): Uint8ClampedArray {
+    const origWidth = imgBitmap.width;
+    const origHeight = imgBitmap.height;
+    // Lazily initialize OffscreenCanvas
+    if (!offscreenCanvas || !offscreenCanvasContext) {
+        // DEM tiles are typically 256x256
+        offscreenCanvas = new OffscreenCanvas(origWidth, origHeight);
+        offscreenCanvasContext = offscreenCanvas.getContext('2d', {willReadFrequently: true});
+    }
+
+    offscreenCanvas.width = origWidth;
+    offscreenCanvas.height = origHeight;
+
+    offscreenCanvasContext.drawImage(imgBitmap, 0, 0, origWidth, origHeight);
+    const imgData = offscreenCanvasContext.getImageData(x, y, width, height);
+    offscreenCanvasContext.clearRect(0, 0, origWidth, origHeight);
+    return imgData.data;
+}
+
+/**
+ * Reads RGBA pixels from an image, preferring OffscreenCanvas but falling back to VideoFrame if supported and
+ * + * @param data - image, imagebitmap, or canvas to parse + * @param x - top-left x coordinate to read from the image + * @param y - top-left y coordinate to read from the image + * @param width - width of the rectangle to read from the image + * @param height - height of the rectangle to read from the image + * @returns a promise containing the parsed RGBA pixel values of the image + */ +export async function getImageData( + image: HTMLImageElement | HTMLCanvasElement | ImageBitmap | OffscreenCanvas, + x: number, y: number, width: number, height: number +): Promise { + if (isOffscreenCanvasDistorted()) { + try { + return await readImageUsingVideoFrame(image, x, y, width, height); + } catch (e) { + // fall back to OffscreenCanvas + } + } + return readImageDataUsingOffscreenCanvas(image, x, y, width, height); +} diff --git a/test/build/min.test.ts b/test/build/min.test.ts index 8624b64b72..bd97d4ad44 100644 --- a/test/build/min.test.ts +++ b/test/build/min.test.ts @@ -36,7 +36,7 @@ describe('test min build', () => { const decreaseQuota = 4096; // feel free to update this value after you've checked that it has changed on purpose :-) - const expectedBytes = 770837; + const expectedBytes = 772089; expect(actualBytes - expectedBytes).toBeLessThan(increaseQuota); expect(expectedBytes - actualBytes).toBeLessThan(decreaseQuota);