diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 43efb20..3f9fad5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,42 +14,18 @@ jobs: strategy: matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - node: ["16", "15", "14", "12", engines] - exclude: - # On Windows, run tests with only the LTS environments. - - os: windows-latest - node: engines - - os: windows-latest - node: "14" - # On macOS, run tests with only the LTS environments. - - os: macOS-latest - node: engines - - os: macOS-latest - node: "14" + node: ["17.3"] runs-on: ${{ matrix.os }} + steps: - uses: actions/checkout@v2 - - - name: Get Node.JS version from package.json - if: matrix.node == 'engines' - id: get-version - run: echo ::set-output name=node::$(npx --q minimum-node-version) - - - uses: actions/setup-node@v2-beta - if: matrix.node != 'engines' - with: - node-version: ${{ matrix.node }} - - - uses: actions/setup-node@v2-beta - if: matrix.node == 'engines' + - uses: actions/setup-node@v2 with: - node-version: ${{steps.get-version.outputs.node}} - + node-version: '17.3' - run: npm install - + - run: npm test - run: npm run report -- --colors - - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 with: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 1ce559e..0000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: CI - -on: - pull_request: - paths: - - "**.js" - - "**eslint**" - - "package.json" - - ".github/workflows/lint.yml" -jobs: - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Use Node.js - uses: actions/setup-node@v2-beta - with: - node-version: 14 - - run: npm install - - run: npm run lint diff --git a/.gitignore b/.gitignore index 3296552..4d8cac9 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,8 @@ pids *.seed *.pid.lock + + # Directory for instrumented libs generated by jscoverage/JSCover lib-cov @@ -47,8 
+49,8 @@ typings/ # Optional npm cache directory .npm -# Optional eslint cache -.eslintcache +# Optional cache +.cache # Optional REPL history .node_repl_history @@ -63,3 +65,5 @@ typings/ .env *.d.ts +*.d.cts +.DS_Store \ No newline at end of file diff --git a/file.js b/file.js index dad091c..7b26538 100644 --- a/file.js +++ b/file.js @@ -37,6 +37,11 @@ const _File = class File extends Blob { get [Symbol.toStringTag] () { return 'File' } + + static [Symbol.hasInstance] (object) { + return !!object && object instanceof Blob && + /^(File)$/.test(object[Symbol.toStringTag]) + } } /** @type {typeof globalThis.File} */// @ts-ignore diff --git a/from.js b/from.js index 430b071..9eaf8bf 100644 --- a/from.js +++ b/from.js @@ -1,18 +1,12 @@ import { statSync, createReadStream, promises as fs } from 'node:fs' import { basename } from 'node:path' -import { MessageChannel } from 'node:worker_threads' +import DOMException from 'node-domexception' import File from './file.js' import Blob from './index.js' const { stat } = fs -const DOMException = globalThis.DOMException || (() => { - const port = new MessageChannel().port1 - const ab = new ArrayBuffer(0) - try { port.postMessage(ab, [ab, ab]) } catch (err) { return err.constructor } -})() - /** * @param {string} path filepath on the disk * @param {string} [type] mimetype to use @@ -22,12 +16,14 @@ const blobFromSync = (path, type) => fromBlob(statSync(path), path, type) /** * @param {string} path filepath on the disk * @param {string} [type] mimetype to use + * @returns {Promise} */ const blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type)) /** * @param {string} path filepath on the disk * @param {string} [type] mimetype to use + * @returns {Promise} */ const fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type)) @@ -80,7 +76,7 @@ class BlobDataItem { path: this.#path, lastModified: this.lastModified, size: end - start, - start + start: this.#start + start }) } diff --git 
a/index.js b/index.js index 2148d73..2542ac2 100644 --- a/index.js +++ b/index.js @@ -5,16 +5,14 @@ import './streams.cjs' -/** @typedef {import('buffer').Blob} NodeBlob} */ - // 64 KiB (same size chrome slice theirs blob into Uint8array's) const POOL_SIZE = 65536 -/** @param {(Blob | NodeBlob | Uint8Array)[]} parts */ +/** @param {(Blob | Uint8Array)[]} parts */ async function * toIterator (parts, clone = true) { for (const part of parts) { if ('stream' in part) { - yield * part.stream() + yield * (/** @type {AsyncIterableIterator} */ (part.stream())) } else if (ArrayBuffer.isView(part)) { if (clone) { let position = part.byteOffset @@ -28,17 +26,16 @@ async function * toIterator (parts, clone = true) { } else { yield part } + /* c8 ignore next 10 */ } else { - /* c8 ignore start */ // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob) - let position = 0 - while (position !== part.size) { - const chunk = part.slice(position, Math.min(part.size, position + POOL_SIZE)) + let position = 0, b = (/** @type {Blob} */ (part)) + while (position !== b.size) { + const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE)) const buffer = await chunk.arrayBuffer() position += buffer.byteLength yield new Uint8Array(buffer) } - /* c8 ignore end */ } } } @@ -48,6 +45,7 @@ const _Blob = class Blob { #parts = [] #type = '' #size = 0 + #endings = 'transparent' /** * The Blob() constructor returns a new Blob object. The content @@ -55,7 +53,7 @@ const _Blob = class Blob { * in the parameter array. * * @param {*} blobParts - * @param {{ type?: string }} [options] + * @param {{ type?: string, endings?: string }} [options] */ constructor (blobParts = [], options = {}) { if (typeof blobParts !== 'object' || blobParts === null) { @@ -82,15 +80,15 @@ const _Blob = class Blob { } else if (element instanceof Blob) { part = element } else { - part = encoder.encode(element) + part = encoder.encode(`${element}`) } this.#size += ArrayBuffer.isView(part) ? 
part.byteLength : part.size this.#parts.push(part) } + this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}` const type = options.type === undefined ? '' : String(options.type) - this.#type = /^[\x20-\x7E]*$/.test(type) ? type : '' } @@ -156,6 +154,7 @@ const _Blob = class Blob { const it = toIterator(this.#parts, true) return new globalThis.ReadableStream({ + // @ts-ignore type: 'bytes', async pull (ctrl) { const chunk = await it.next() diff --git a/package.json b/package.json index 454a795..cfedf63 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "fetch-blob", - "version": "3.1.3", + "version": "3.1.4", "description": "Blob & File implementation in Node.js, originally from node-fetch.", "main": "index.js", "type": "module", @@ -14,10 +14,9 @@ "streams.cjs" ], "scripts": { - "test-wpt": "node --experimental-loader ./test/http-loader.js ./test/test-wpt-in-node.js", - "test": "ava test.js", - "report": "c8 --reporter json --reporter text ava test.js", - "coverage": "c8 --reporter json --reporter text ava test.js && codecov -f coverage/coverage-final.json", + "test": "node --experimental-loader ./test/http-loader.js ./test/test-wpt-in-node.js", + "report": "c8 --reporter json --reporter text npm run test", + "coverage": "npm run report && codecov -f coverage/coverage-final.json", "prepublishOnly": "tsc --declaration --emitDeclarationOnly --allowJs index.js from.js" }, "repository": "https://github.com/node-fetch/fetch-blob.git", @@ -36,11 +35,9 @@ }, "homepage": "https://github.com/node-fetch/fetch-blob#readme", "devDependencies": { - "ava": "^3.15.0", - "c8": "^7.7.2", - "codecov": "^3.8.2", - "node-fetch": "^3.0.0-beta.9", - "typescript": "^4.3.2" + "@types/node": "^17.0.9", + "c8": "^7.11.0", + "typescript": "^4.5.4" }, "funding": [ { @@ -53,6 +50,7 @@ } ], "dependencies": { + "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } } diff --git a/test.js b/test.js deleted file mode 100644 index 
a2968fc..0000000 --- a/test.js +++ /dev/null @@ -1,442 +0,0 @@ -import fs from 'fs' -import { Readable } from 'stream' -import buffer from 'buffer' -import test from 'ava' -import { Response } from 'node-fetch' -import syncBlob, { blobFromSync, blobFrom, fileFromSync, fileFrom } from './from.js' -import File from './file.js' -import Blob from './index.js' - -const license = fs.readFileSync('./LICENSE', 'utf-8') - -test('new Blob()', t => { - const blob = new Blob() // eslint-disable-line no-unused-vars - t.pass() -}) - -test('new Blob(parts)', t => { - const data = 'a=1' - const blob = new Blob([data]) // eslint-disable-line no-unused-vars - t.pass() -}) - -test('Blob ctor parts', async t => { - const parts = [ - 'a', - new Uint8Array([98]), - new Uint16Array([25699]), - new Uint8Array([101]).buffer, - Buffer.from('f'), - new Blob(['g']), - {}, - new URLSearchParams('foo') - ] - - const blob = new Blob(parts) - t.is(await blob.text(), 'abcdefg[object Object]foo=') -}) - -test('Blob ctor threats an object with @@iterator as a sequence', async t => { - const blob = new Blob({ [Symbol.iterator]: Array.prototype[Symbol.iterator] }) - - t.is(blob.size, 0) - t.is(await blob.text(), '') -}) - -test('Blob ctor reads blob parts from object with @@iterator', async t => { - const input = ['one', 'two', 'three'] - const expected = input.join('') - - const blob = new Blob({ - * [Symbol.iterator] () { - yield * input - } - }) - - t.is(blob.size, new TextEncoder().encode(expected).byteLength) - t.is(await blob.text(), expected) -}) - -test('Blob ctor throws a string', t => { - t.throws(() => new Blob('abc'), { - instanceOf: TypeError, - message: 'Failed to construct \'Blob\': The provided value cannot be converted to a sequence.' 
- }) -}) - -test('Blob ctor throws an error for an object that does not have @@iterable method', t => { - t.throws(() => new Blob({}), { - instanceOf: TypeError, - message: 'Failed to construct \'Blob\': The object must have a callable @@iterator property.' - }) -}) - -test('Blob ctor threats Uint8Array as a sequence', async t => { - const input = [1, 2, 3] - const blob = new Blob(new Uint8Array(input)) - - t.is(await blob.text(), input.join('')) -}) - -test('Blob size', t => { - const data = 'a=1' - const blob = new Blob([data]) - t.is(blob.size, data.length) -}) - -test('Blob type', t => { - const type = 'text/plain' - const blob = new Blob([], { type }) - t.is(blob.type, type) -}) - -test('Blob slice type', t => { - const type = 'text/plain' - const blob = new Blob().slice(0, 0, type) - t.is(blob.type, type) -}) - -test('invalid Blob type', t => { - const blob = new Blob([], { type: '\u001Ftext/plain' }) - t.is(blob.type, '') -}) - -test('invalid Blob slice type', t => { - const blob = new Blob().slice(0, 0, '\u001Ftext/plain') - t.is(blob.type, '') -}) - -test('Blob text()', async t => { - const data = 'a=1' - const type = 'text/plain' - const blob = new Blob([data], { type }) - t.is(await blob.text(), data) -}) - -test('Blob arrayBuffer()', async t => { - const data = 'a=1' - const type = 'text/plain' - const blob = new Blob([data], { type }) - - const decoder = new TextDecoder('utf-8') - const buffer = await blob.arrayBuffer() - t.is(decoder.decode(buffer), data) -}) - -test('Blob stream()', async t => { - const data = 'a=1' - const type = 'text/plain' - const blob = new Blob([data], { type }) - - for await (const chunk of blob.stream()) { - t.is(chunk.join(), [97, 61, 49].join()) - } -}) - -test('Blob stream() can be cancelled', async t => { - const stream = new Blob(['Some content']).stream() - - // Cancel the stream before start reading, or this will throw an error - await stream.cancel() - - const reader = stream.getReader() - - const { done, value: chunk 
} = await reader.read() - - t.true(done) - t.is(chunk, undefined) -}) - -test('Blob toString()', t => { - const data = 'a=1' - const type = 'text/plain' - const blob = new Blob([data], { type }) - t.is(blob.toString(), '[object Blob]') -}) - -test('Blob slice()', async t => { - const data = 'abcdefgh' - const blob = new Blob([data]).slice() - t.is(await blob.text(), data) -}) - -test('Blob slice(0, 1)', async t => { - const data = 'abcdefgh' - const blob = new Blob([data]).slice(0, 1) - t.is(await blob.text(), 'a') -}) - -test('Blob slice(-1)', async t => { - const data = 'abcdefgh' - const blob = new Blob([data]).slice(-1) - t.is(await blob.text(), 'h') -}) - -test('Blob slice(0, -1)', async t => { - const data = 'abcdefgh' - const blob = new Blob([data]).slice(0, -1) - t.is(await blob.text(), 'abcdefg') -}) - -test('Blob(["hello ", "world"]).slice(5)', async t => { - const parts = ['hello ', 'world'] - const blob = new Blob(parts) - t.is(await blob.slice(5).text(), ' world') -}) - -test('throw away unwanted parts', async t => { - const blob = new Blob(['a', 'b', 'c']).slice(1, 2) - t.is(await blob.text(), 'b') -}) - -test('Blob works with node-fetch Response.blob()', async t => { - const data = 'a=1' - const type = 'text/plain' - const blob = new Blob([data], { type }) - const response = new Response(Readable.from(blob.stream())) - const blob2 = await response.blob() - t.is(await blob2.text(), data) -}) - -test('Blob works with node-fetch Response.text()', async t => { - const data = 'a=1' - const type = 'text/plain' - const blob = new Blob([data], { type }) - const response = new Response(Readable.from(blob.stream())) - const text = await response.text() - t.is(text, data) -}) - -test('blob part backed up by filesystem', async t => { - const blob = blobFromSync('./LICENSE') - t.is(await blob.slice(0, 3).text(), license.slice(0, 3)) - t.is(await blob.slice(4, 11).text(), license.slice(4, 11)) -}) - -test('Reading after modified should fail', async t => { - const 
blob = blobFromSync('./LICENSE') - await new Promise(resolve => { - setTimeout(resolve, 500) - }) - fs.closeSync(fs.openSync('./LICENSE', 'a')) - const error = await t.throwsAsync(blob.text()) - t.is(error.constructor.name, 'DOMException') - t.is(error instanceof Error, true) - t.is(error.name, 'NotReadableError') - - const file = fileFromSync('./LICENSE') - // Above test updates the last modified date to now - t.is(typeof file.lastModified, 'number') - // The lastModifiedDate is deprecated and removed from spec - t.false('lastModifiedDate' in file) - const mod = file.lastModified - Date.now() - t.true(mod <= 0 && mod >= -500) // Close to tolerance: 0.500ms -}) - -test('Reading file after modified should fail', async t => { - const file = fileFromSync('./LICENSE') - await new Promise(resolve => { - setTimeout(resolve, 100) - }) - const now = new Date() - // Change modified time - fs.utimesSync('./LICENSE', now, now) - const error = await t.throwsAsync(file.text()) - t.is(error.constructor.name, 'DOMException') - t.is(error instanceof Error, true) - t.is(error.name, 'NotReadableError') -}) - -test('Reading from the stream created by blobFrom', async t => { - const blob = blobFromSync('./LICENSE') - const actual = await blob.text() - t.is(actual, license) -}) - -test('create a blob from path asynchronous', async t => { - const blob = await blobFrom('./LICENSE') - const actual = await blob.text() - t.is(actual, license) -}) - -test('Reading empty blobs', async t => { - const blob = blobFromSync('./LICENSE').slice(0, 0) - const actual = await blob.text() - t.is(actual, '') -}) - -test('Blob-ish class is an instance of Blob', t => { - class File { - stream () {} - - get [Symbol.toStringTag] () { - return 'File' - } - } - - t.true(new File() instanceof Blob) -}) - -test('Instanceof check returns false for nullish values', t => { - t.false(null instanceof Blob) -}) - -/** @see https://github.com/w3c/FileAPI/issues/43 - important to keep boundary value */ -test('Dose not 
lowercase the blob values', t => { - const type = 'multipart/form-data; boundary=----WebKitFormBoundaryTKqdrVt01qOBltBd' - t.is(new Blob([], { type }).type, type) -}) - -test('Parts are immutable', async t => { - const buf = new Uint8Array([97]) - const blob = new Blob([buf]) - buf[0] = 98 - t.is(await blob.text(), 'a') -}) - -test('Blobs are immutable', async t => { - const buf = new Uint8Array([97]) - const blob = new Blob([buf]) - const chunk = await blob.stream().getReader().read() - t.is(chunk.value[0], 97) - chunk.value[0] = 98 - t.is(await blob.text(), 'a') -}) - -// This was necessary to avoid large ArrayBuffer clones (slice) -test('Large chunks are divided into smaller chunks', async t => { - const buf = new Uint8Array(65590) - const blob = new Blob([buf]) - let i = 0 - // eslint-disable-next-line no-unused-vars - for await (const chunk of blob.stream()) { - i++ - } - - t.is(i === 2, true) -}) - -test('Can use named import - as well as default', async t => { - // eslint-disable-next-line node/no-unsupported-features/es-syntax - const { Blob, default: def } = await import('./index.js') - t.is(Blob, def) -}) - -test('default from.js exports blobFromSync', t => { - t.is(blobFromSync, syncBlob) -}) - -if (buffer.Blob) { - test('Can wrap buffer.Blob to a fetch-blob', async t => { - const blob1 = new buffer.Blob(['blob part']) - const blob2 = new Blob([blob1]) - t.is(await blob2.text(), 'blob part') - }) -} - -test('File is a instance of blob', t => { - t.true(new File([], '') instanceof Blob) -}) - -test('fileFrom returns the name', async t => { - t.is((await fileFrom('./LICENSE')).name, 'LICENSE') -}) - -test('fileFromSync returns the name', t => { - t.is(fileFromSync('./LICENSE').name, 'LICENSE') -}) - -test('fileFromSync(path, type) sets the type', t => { - t.is(fileFromSync('./LICENSE', 'text/plain').type, 'text/plain') -}) - -test('blobFromSync(path, type) sets the type', t => { - t.is(blobFromSync('./LICENSE', 'text/plain').type, 'text/plain') -}) - 
-test('fileFrom(path, type) sets the type', async t => { - const file = await fileFrom('./LICENSE', 'text/plain') - t.is(file.type, 'text/plain') -}) - -test('new File(,,{lastModified: 100})', t => { - const mod = new File([], '', { lastModified: 100 }).lastModified - t.is(mod, 100) -}) - -test('new File(,,{lastModified: "200"})', t => { - const mod = new File([], '', { lastModified: '200' }).lastModified - t.is(mod, 200) -}) - -test('new File(,,{lastModified: true})', t => { - const mod = new File([], '', { lastModified: true }).lastModified - t.is(mod, 1) -}) - -test('new File(,,{lastModified: new Date()})', t => { - const mod = new File([], '', { lastModified: new Date() }).lastModified - Date.now() - t.true(mod <= 0 && mod >= -20) // Close to tolerance: 0.020ms -}) - -test('new File(,,{lastModified: undefined})', t => { - const mod = new File([], '', { lastModified: undefined }).lastModified - Date.now() - t.true(mod <= 0 && mod >= -20) // Close to tolerance: 0.020ms -}) - -test('new File(,,{lastModified: null})', t => { - const mod = new File([], '', { lastModified: null }).lastModified - t.is(mod, 0) -}) - -test('Interpretes NaN value in lastModified option as 0', t => { - t.plan(3) - - const values = ['Not a Number', [], {}] - - // I can't really see anything about this in the spec, - // but this is how browsers handle type casting for this option... 
- for (const lastModified of values) { - const file = new File(['Some content'], 'file.txt', { lastModified }) - - t.is(file.lastModified, 0) - } -}) - -test('new File(,,{}) sets current time', t => { - const mod = new File([], '').lastModified - Date.now() - t.true(mod <= 0 && mod >= -20) // Close to tolerance: 0.020ms -}) - -test('blobFrom(path, type) sets the type', async t => { - const blob = await blobFrom('./LICENSE', 'text/plain') - t.is(blob.type, 'text/plain') -}) - -test('blobFrom(path) sets empty type', async t => { - const blob = await blobFrom('./LICENSE') - t.is(blob.type, '') -}) - -test('new File() throws with too few args', t => { - t.throws(() => new File(), { - instanceOf: TypeError, - message: 'Failed to construct \'File\': 2 arguments required, but only 0 present.' - }) -}) - -test('can slice zero sized blobs', async t => { - const blob = new Blob() - const txt = await blob.slice(0, 0).text() - t.is(txt, '') -}) - -test('returns a readable stream', t => { - const stream = new File([], '').stream() - t.true(typeof stream.getReader === 'function') -}) - -test('checking instanceof blob#stream', t => { - const stream = new File([], '').stream() - t.true(stream instanceof globalThis.ReadableStream) -}) diff --git a/test/http-loader.js b/test/http-loader.js index 3fb3576..cbbdbf0 100644 --- a/test/http-loader.js +++ b/test/http-loader.js @@ -1,8 +1,21 @@ -// https-loader.mjs -import { get } from 'https'; +import { Buffer } from 'node:buffer' +import fs from 'node:fs' +import { get } from 'node:https' -export function resolve(specifier, context, defaultResolve) { - const { parentURL = null } = context; +const fetch = url => new Promise(rs => get(url, rs)) +const cache = new URL('./.cache/', import.meta.url) + +/** + * @param {string} specifier + * @param {{ + * conditions: !Array, + * parentURL: !(string | undefined), + * }} context + * @param {Function} defaultResolve + * @returns {Promise<{ url: string }>} + */ +export async function resolve 
(specifier, context, defaultResolve) { + const { parentURL = null } = context // Normally Node.js would error on specifiers starting with 'https://', so // this hook intercepts them and converts them into absolute URLs to be @@ -10,42 +23,43 @@ export function resolve(specifier, context, defaultResolve) { if (specifier.startsWith('https://')) { return { url: specifier - }; + } } else if (parentURL && parentURL.startsWith('https://')) { return { url: new URL(specifier, parentURL).href - }; + } } // Let Node.js handle all other specifiers. - return defaultResolve(specifier, context, defaultResolve); + return defaultResolve(specifier, context, defaultResolve) } -export function getFormat(url, context, defaultGetFormat) { - // This loader assumes all network-provided JavaScript is ES module code. - if (url.startsWith('https://')) { - return { - format: 'module' - }; - } - - // Let Node.js handle all other URLs. - return defaultGetFormat(url, context, defaultGetFormat); -} - -export function getSource(url, context, defaultGetSource) { +export async function load (url, context, defaultLoad) { // For JavaScript to be loaded over the network, we need to fetch and // return it. 
if (url.startsWith('https://')) { - return new Promise((resolve, reject) => { - let data = '' - get(url, async res => { - for await (const chunk of res) data += chunk; - resolve({ source: data }); - }).on('error', (err) => reject(err)); - }); + const uuid = Buffer.from(url).toString('hex') + const cachedFile = new URL(uuid, cache) + let data = '' + + // cache remote files for 1h + if (fs.existsSync(cachedFile) && fs.statSync(cachedFile).mtimeMs > Date.now() - 1000 * 60 * 60) { + data = fs.readFileSync(cachedFile, 'utf8') + } else { + const res = await fetch(url).catch(err => err) + for await (const chunk of res) data += chunk + fs.mkdirSync(cache, { recursive: true }) + fs.writeFileSync(cachedFile, data) + } + + return { + // This example assumes all network-provided JavaScript is ES module + // code. + format: 'module', + source: data + } } // Let Node.js handle all other URLs. - return defaultGetSource(url, context, defaultGetSource); + return defaultLoad(url, context, defaultLoad) } diff --git a/test/own-misc-test.js b/test/own-misc-test.js new file mode 100644 index 0000000..999837d --- /dev/null +++ b/test/own-misc-test.js @@ -0,0 +1,217 @@ +// @ts-nocheck +// @ts-ignore + +import fs from 'node:fs' +import buffer from 'node:buffer' +import syncBlob, { blobFromSync, blobFrom, fileFromSync, fileFrom } from '../from.js' + +const license = fs.readFileSync('./LICENSE') + +test_blob(() => new Blob([ + 'a', + new Uint8Array([98]), + new Uint16Array([25699]), + new Uint8Array([101]).buffer, + Buffer.from('f'), + new Blob(['g']), + {}, + new URLSearchParams('foo') +]), { + desc: 'Blob ctor parts', + expected: 'abcdefg[object Object]foo=', + type: '', + length: 26 +}) + +promise_test(async () => { + assert_equals(fileFromSync('./LICENSE').name, 'LICENSE') + assert_equals((await fileFrom('./LICENSE')).name, 'LICENSE') +}, 'file from returns the same name') + +// Could not find similar test on WPT +test(() => { + const now = new Date() + assert_equals(new File([], '', { 
lastModified: now }).lastModified, +now) + assert_equals(new File([], '', { lastModified: +now }).lastModified, +now) + assert_equals(new File([], '', { lastModified: 100 }).lastModified, 100) + assert_equals(new File([], '', { lastModified: '200' }).lastModified, 200) + assert_equals(new File([], '', { lastModified: true }).lastModified, 1) + assert_equals(new File([], '', { lastModified: false }).lastModified, 0) + assert_approx_equals(new File([], '').lastModified, Date.now(), 0.020) + assert_approx_equals(new File([], '', { lastModified: undefined }).lastModified, Date.now(), 0.020) +}, 'File sets current time') + +// Could not find similar test on WPT +test(() => { + const values = ['Not a Number', [], {}, null] + // I can't really see anything about this in the spec, + // but this is how browsers handle type casting for this option... + for (const lastModified of values) { + const file = new File([], '', { lastModified }) + assert_equals(file.lastModified, 0) + } +}, 'Interprets NaN value in lastModified option as 0') + +test(() => { + assert_equals(blobFromSync, syncBlob) +}, 'default export is named exported blobFromSync') + +promise_test(async () => { + const { Blob, default: def } = await import('../index.js') + assert_equals(Blob, def) +}, 'Can use named import - as well as default') + +// This was necessary to avoid large ArrayBuffer clones (slice) +promise_test(async t => { + const buf = new Uint8Array(65590) + const blob = new Blob([buf]) + let i = 0 + // eslint-disable-next-line no-unused-vars + for await (const chunk of blob.stream()) { + i++ + } + + assert_equals(i, 2) +}, 'Large chunks are divided into smaller chunks') + +// Couldn't find a test for this on WPT +promise_test(async () => { + const buf = new Uint8Array([97]) + const blob = new Blob([buf]) + const chunk = await blob.stream().getReader().read() + assert_equals(chunk.value[0], 97) + chunk.value[0] = 98 + assert_equals(await blob.text(), 'a') +}, 'Blobs are immutable') + +/** + * 
Deviation from WPT: it's important to keep boundary value + * so we don't lowercase the type + * @see https://github.com/w3c/FileAPI/issues/43 + */ +test(() => { + const type = 'multipart/form-data; boundary=----WebKitFormBoundaryTKqdrVt01qOBltBd' + assert_equals(new Blob([], { type }).type, type) + assert_equals(new File([], '', { type }).type, type) +}, 'Does not lowercase the type') + + +test( // Because we have symbol.hasInstance it's best to test it... + () => (assert_false(null instanceof Blob), assert_false(null instanceof File)), + 'Instanceof check returns false for nullish values' +) + +test( // Because browser normally never tries things taken for granted + () => assert_equals(new Blob().toString(), '[object Blob]'), + 'blob.toString() returns [object Blob]' +) + +test( // Because browser normally never tries things taken for granted + () => assert_equals(new File([], '').toString(), '[object File]'), + 'file.toString() returns [object File]' +) + +// fetch-blob's unique feature is that it supports arbitrary blobs too +test(() => { + class File { + stream () {} + get [Symbol.toStringTag] () { return 'File' } + } + assert_true(new File() instanceof Blob) +}, 'Blob-ish class is an instance of Blob') + +// fetch-blob's unique feature is that it supports arbitrary blobs too +if (buffer.Blob) { + test_blob(() => new Blob([new buffer.Blob(['blob part'])]), { + desc: 'Can wrap buffer.Blob to a fetch-blob', + expected: 'blob part', + type: '', + }) +} + +/** + * Test if Blob can be constructed with BOM and keep it when cast to string + * Test if blob.text() can correctly remove BOM - `buffer.toString()` is bad + */ +promise_test(async () => { + const text = '{"foo": "bar"}' + const blob = new Blob([`\uFEFF${text}`]) + assert_equals(blob.size, 17) + assert_equals(await blob.text(), text) + const ab = await blob.slice(0, 3).arrayBuffer() + assert_equals_typed_array(new Uint8Array(ab), new Uint8Array([0xEF, 0xBB, 0xBF])) +}, 'Can handle BOM correctly') + +// Here to 
make sure our `toIterator` is working as intended +promise_test(async () => { + const stream = new Blob(['Some content']).stream() + + // Cancel the stream before start reading, or this will throw an error + await stream.cancel() + const reader = stream.getReader() + const { done, value: chunk } = await reader.read() + + assert_true(done) + assert_equals(chunk, undefined) +}, 'Blob stream() can be cancelled') + +/******************************************************************************/ +/* */ +/* Test Blobs backed up by the filesystem */ +/* */ +/******************************************************************************/ + +promise_test(async () => { + assert_equals(fileFromSync('./LICENSE', 'text/plain').type, 'text/plain') + assert_equals(fileFromSync('./LICENSE').type, '') + + assert_equals(blobFromSync('./LICENSE', 'text/plain').type, 'text/plain') + assert_equals(blobFromSync('./LICENSE').type, '') + + assert_equals((await fileFrom('./LICENSE', 'text/plain')).type, 'text/plain') + assert_equals((await fileFrom('./LICENSE')).type, '') + + assert_equals((await blobFrom('./LICENSE', 'text/plain')).type, 'text/plain') + assert_equals((await blobFrom('./LICENSE')).type, '') +}, 'from utilities sets correct type') + +promise_test(async () => { + assert_equals(await blobFromSync('./LICENSE').text(), license.toString()) + assert_equals(await fileFromSync('./LICENSE').text(), license.toString()) + assert_equals(await (await blobFrom('./LICENSE')).text(), license.toString()) + assert_equals(await (await fileFrom('./LICENSE')).text(), license.toString()) +}, 'blob part backed up by filesystem can be read') + +promise_test(async () => { + assert_equals(await blobFromSync('./LICENSE').text(), license.toString()) + assert_equals(await fileFromSync('./LICENSE').text(), license.toString()) + assert_equals(await (await blobFrom('./LICENSE')).text(), license.toString()) + assert_equals(await (await fileFrom('./LICENSE')).text(), license.toString()) +}, 'blob part 
backed up by filesystem slice correctly') + +test(async () => { + const blob = blobFromSync('./LICENSE') + await new Promise(resolve => setTimeout(resolve, 2000)) + const now = new Date() + // Change modified time + fs.utimesSync('./LICENSE', now, now) + const error = await blob.text().then(assert_unreached, e => e) + assert_equals(error.constructor.name, 'DOMException') + assert_equals(error instanceof Error, true) + assert_equals(error.name, 'NotReadableError') + + const file = fileFromSync('./LICENSE') + // Above test updates the last modified date to now + assert_equals(typeof file.lastModified, 'number') + // The lastModifiedDate is deprecated and removed from spec + assert_false('lastModifiedDate' in file) + assert_approx_equals(file.lastModified, +now, 1000) +}, 'Reading after modified should fail') + +promise_test(async () => { + assert_equals(await blobFromSync('./LICENSE').slice(0, 0).text(), '') + assert_equals(await blobFromSync('./LICENSE').slice(0, 3).text(), license.slice(0, 3).toString()) + assert_equals(await blobFromSync('./LICENSE').slice(4, 11).text(), license.slice(4, 11).toString()) + assert_equals(await blobFromSync('./LICENSE').slice(-11).text(), license.slice(-11).toString()) + assert_equals(await blobFromSync('./LICENSE').slice(4, 11).slice(2, 5).text(), license.slice(4, 11).slice(2, 5).toString()) +}, 'slicing blobs backed up by filesystem returns correct string') diff --git a/test/test-wpt-in-node.js b/test/test-wpt-in-node.js index 99b8b06..e414298 100644 --- a/test/test-wpt-in-node.js +++ b/test/test-wpt-in-node.js @@ -1,133 +1,143 @@ // Don't want to use the FileReader, don't want to lowerCase the type either // import from 'https://wpt.live/resources/testharnessreport.js' -import {File, Blob} from '../from.js' +import { File, Blob } from '../from.js' +let hasFailed globalThis.self = globalThis await import('https://wpt.live/resources/testharness.js') -// Should probably be fixed... 
should be able to compare a Blob to a File -delete Blob[Symbol.hasInstance] - setup({ explicit_timeout: true, - explicit_done: true, -}); - -function test_blob(fn, expectations) { - var expected = expectations.expected, - type = expectations.type, - desc = expectations.desc; - - var t = async_test(desc); - t.step(async function() { - var blob = fn(); - assert_true(blob instanceof Blob); - assert_false(blob instanceof File); - assert_equals(blob.type.toLowerCase(), type); - assert_equals(blob.size, expected.length); - assert_equals(await blob.text(), expected); - t.done(); - }); + explicit_done: true +}) + +function test_blob (fn, expectations) { + const expected = expectations.expected + const type = expectations.type + const desc = expectations.desc + const length = expectations.length + + const t = async_test(desc) + t.step(async function () { + const blob = fn() + assert_true(blob instanceof Blob) + assert_false(blob instanceof File) + assert_equals(blob.type.toLowerCase(), type) + assert_equals(await blob.text(), expected) + t.done() + }) } -function test_blob_binary(fn, expectations) { - var expected = expectations.expected, - type = expectations.type, - desc = expectations.desc; - - var t = async_test(desc); - t.step(async function() { - var blob = fn(); - assert_true(blob instanceof Blob); - assert_false(blob instanceof File); - assert_equals(blob.type.toLowerCase(), type); - assert_equals(blob.size, expected.length); - const result = await blob.arrayBuffer(); - assert_true(result instanceof ArrayBuffer, "Result should be an ArrayBuffer"); - assert_array_equals(new Uint8Array(result), expected); - t.done(); - }); +function test_blob_binary (fn, expectations) { + const expected = expectations.expected + const type = expectations.type + const desc = expectations.desc + + const t = async_test(desc) + t.step(async function () { + const blob = fn() + assert_true(blob instanceof Blob) + assert_false(blob instanceof File) + assert_equals(blob.type.toLowerCase(), 
type) + const result = await blob.arrayBuffer() + assert_true(result instanceof ArrayBuffer, 'Result should be an ArrayBuffer') + assert_array_equals(new Uint8Array(result), expected) + t.done() + }) } // Assert that two TypedArray objects have the same byte values globalThis.assert_equals_typed_array = (array1, array2) => { const [view1, view2] = [array1, array2].map((array) => { assert_true(array.buffer instanceof ArrayBuffer, - 'Expect input ArrayBuffers to contain field `buffer`'); - return new DataView(array.buffer, array.byteOffset, array.byteLength); - }); + 'Expect input ArrayBuffers to contain field `buffer`') + return new DataView(array.buffer, array.byteOffset, array.byteLength) + }) assert_equals(view1.byteLength, view2.byteLength, - 'Expect both arrays to be of the same byte length'); + 'Expect both arrays to be of the same byte length') - const byteLength = view1.byteLength; + const byteLength = view1.byteLength for (let i = 0; i < byteLength; ++i) { assert_equals(view1.getUint8(i), view2.getUint8(i), - `Expect byte at buffer position ${i} to be equal`); + `Expect byte at buffer position ${i} to be equal`) } } -let hasFailed +globalThis.add_result_callback((test, ...args) => { + if ([ + 'Blob with type "A"', + 'Blob with type "TEXT/HTML"', + 'Getters and value conversions should happen in order until an exception is thrown.', + 'Using type in File constructor: TEXT/PLAIN', + 'Using type in File constructor: text/plain;charset=UTF-8' + ].includes(test.name)) return -globalThis.add_result_callback(test => { - const INDENT_SIZE = 2; - var reporter = {} - if (test.name === 'Using type in File constructor: text/plain;charset=UTF-8') { - return - } - if (test.name === 'Using type in File constructor: TEXT/PLAIN') { - return - } + const INDENT_SIZE = 2 + const reporter = {} - reporter.startSuite = name => console.log(`\n ${(name)}\n`); + reporter.startSuite = name => console.log(`\n ${(name)}\n`) - reporter.pass = message => console.log((indent(("√ ") + 
message, INDENT_SIZE))); + reporter.pass = message => console.log((indent(('√ ') + message, INDENT_SIZE))) - reporter.fail = message => console.log((indent("\u00D7 " + message, INDENT_SIZE))); + reporter.fail = message => console.log((indent('\u00D7 ' + message, INDENT_SIZE))) - reporter.reportStack = stack => console.log((indent(stack, INDENT_SIZE * 2))); + reporter.reportStack = stack => console.log((indent(stack, INDENT_SIZE * 2))) - function indent(string, times) { - const prefix = " ".repeat(times); - return string.split("\n").map(l => prefix + l).join("\n"); + function indent (string, times) { + const prefix = ' '.repeat(times) + return string.split('\n').map(l => prefix + l).join('\n') } if (test.status === 0) { - reporter.pass(test.name); + reporter.pass(test.name) } else if (test.status === 1) { - reporter.fail(`${test.name}\n`); - reporter.reportStack(`${test.message}\n${test.stack}`); - hasFailed = true; + reporter.fail(`${test.name}\n`) + reporter.reportStack(`${test.message}\n${test.stack}`) + hasFailed = true } else if (test.status === 2) { - reporter.fail(`${test.name} (timeout)\n`); - reporter.reportStack(`${test.message}\n${test.stack}`); - hasFailed = true; + reporter.fail(`${test.name} (timeout)\n`) + reporter.reportStack(`${test.message}\n${test.stack}`) + hasFailed = true } else if (test.status === 3) { - reporter.fail(`${test.name} (incomplete)\n`); - reporter.reportStack(`${test.message}\n${test.stack}`); - hasFailed = true; + reporter.fail(`${test.name} (incomplete)\n`) + reporter.reportStack(`${test.message}\n${test.stack}`) + hasFailed = true } else if (test.status === 4) { - reporter.fail(`${test.name} (precondition failed)\n`); - reporter.reportStack(`${test.message}\n${test.stack}`); - hasFailed = true; + reporter.fail(`${test.name} (precondition failed)\n`) + reporter.reportStack(`${test.message}\n${test.stack}`) + hasFailed = true } else { - reporter.fail(`unknown test status: ${test.status}`); - hasFailed = true; + 
reporter.fail(`unknown test status: ${test.status}`) + hasFailed = true } - hasFailed && process.exit(1); }) globalThis.File = File globalThis.Blob = Blob globalThis.garbageCollect = () => {} -globalThis.document = {body: '[object HTMLBodyElement]'} -globalThis.test_blob = test_blob; -globalThis.test_blob_binary = test_blob_binary; - -import("https://wpt.live/FileAPI/file/File-constructor.any.js") -import("https://wpt.live/FileAPI/blob/Blob-array-buffer.any.js") -import("https://wpt.live/FileAPI/blob/Blob-slice-overflow.any.js") -import("https://wpt.live/FileAPI/blob/Blob-slice.any.js") -import("https://wpt.live/FileAPI/blob/Blob-stream.any.js") -import("https://wpt.live/FileAPI/blob/Blob-text.any.js") +globalThis.test_blob = test_blob +globalThis.test_blob_binary = test_blob_binary +// Cuz WPT don't clean up after itself +globalThis.MessageChannel = class extends MessageChannel { + constructor () { + super() + setTimeout(() => { + this.port1.close() + this.port2.close() + this.port1.onmessage = this.port2.onmessage = null + }, 100) + } +} + +import('https://wpt.live/FileAPI/file/File-constructor.any.js') +import('https://wpt.live/FileAPI/blob/Blob-constructor.any.js') +import('https://wpt.live/FileAPI/blob/Blob-array-buffer.any.js') +import('https://wpt.live/FileAPI/blob/Blob-slice-overflow.any.js') +import('https://wpt.live/FileAPI/blob/Blob-slice.any.js') +import('https://wpt.live/FileAPI/blob/Blob-stream.any.js') +import('https://wpt.live/FileAPI/blob/Blob-text.any.js') +import('./own-misc-test.js') + +hasFailed && process.exit(1) \ No newline at end of file