diff --git a/doc/api/errors.md b/doc/api/errors.md index 04cd3dc6a680e7..d74ddc6176110a 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1424,6 +1424,11 @@ is set for the `Http2Stream`. `http2.connect()` was passed a URL that uses any protocol other than `http:` or `https:`. + +### `ERR_ILLEGAL_CONSTRUCTOR` + +An attempt was made to construct an object using a non-public constructor. + ### `ERR_INCOMPATIBLE_OPTION_PAIR` diff --git a/doc/api/index.md b/doc/api/index.md index 71c415afaa673a..448f6d599fc8f5 100644 --- a/doc/api/index.md +++ b/doc/api/index.md @@ -64,6 +64,7 @@ * [VM](vm.md) * [WASI](wasi.md) * [Web Crypto API](webcrypto.md) +* [Web Streams API](webstreams.md) * [Worker threads](worker_threads.md) * [Zlib](zlib.md) diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md new file mode 100644 index 00000000000000..90667c1c1bb28c --- /dev/null +++ b/doc/api/webstreams.md @@ -0,0 +1,1122 @@ +# Web Streams API + +> Stability: 1 - Experimental + +An implementation of the [WHATWG Streams Standard][]. + +```mjs +import { + ReadableStream, + WritableStream, + TransformStream, +} from 'node:stream/web'; +``` + +```cjs +const { + ReadableStream, + WritableStream, + TransformStream, +} = require('stream/web'); +``` + +## Overview + +The [WHATWG Streams Standard][] (or "web streams") defines an API for handling +streaming data. It is similar to the Node.js [Streams][] API but emerged later +and has become the "standard" API for streaming data across many JavaScript +environments. + +There are three primary types of objects + +* `ReadableStream` - Represents a source of streaming data. +* `WritableStream` - Represents a destination for streaming data. +* `TransformStream` - Represents an algorithm for transforming streaming data. + +### Example `ReadableStream` + +This example creates a simple `ReadableStream` that pushes the current +`performance.now()` timestamp once every second forever. An async iterable +is used to read the data from the stream. 
+ +```mjs +import { + ReadableStream +} from 'node:stream/web'; + +import { + setInterval as every +} from 'node:timers/promises'; + +import { + performance +} from 'node:perf_hooks'; + +const SECOND = 1000; + +const stream = new ReadableStream({ + async start(controller) { + for await (const _ of every(SECOND)) + controller.enqueue(performance.now()); + } +}); + +for await (const value of stream) + console.log(value); +``` + +```cjs +const { + ReadableStream +} = require('stream/web'); + +const { + setInterval: every +} = require('timers/promises'); + +const { + performance +} = require('perf_hooks'); + +const SECOND = 1000; + +const stream = new ReadableStream({ + async start(controller) { + for await (const _ of every(SECOND)) + controller.enqueue(performance.now()); + } +}); + +(async () => { + for await (const value of stream) + console.log(value); +})(); +``` + +## API + +### Class: `ReadableStream` + + +#### `new ReadableStream([underlyingSource [, strategy]])` + + + +* `underlyingSource` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `ReadableStream` is created. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: `undefined` or a promise fulfilled with `undefined`. + * `pull` {Function} A user-defined function that is called repeatedly when the + `ReadableStream` internal queue is not full. The operation may be sync or + async. If async, the function will not be called again until the previously + returned promise is fulfilled. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: A promise fulfilled with `undefined`. + * `cancel` {Function} A user-defined function that is called when the + `ReadableStream` is canceled. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. + * `type` {string} Must be `'bytes'` or `undefined`. + * `autoAllocateChunkSize` {number} Used only when `type` is equal to + `'bytes'`. 
+* `strategy` {Object}
+  * `highWaterMark` {number} The maximum internal queue size before backpressure
+    is applied.
+  * `size` {Function} A user-defined function used to identify the size of each
+    chunk of data.
+    * `chunk` {any}
+    * Returns: {number}
+
+
+#### `readableStream.locked`
+
+
+* Type: {boolean} Set to `true` if there is an active reader for this
+  {ReadableStream}.
+
+The `readableStream.locked` property is `false` by default, and is
+switched to `true` while there is an active reader consuming the
+stream's data.
+
+#### `readableStream.cancel([reason])`
+
+
+* `reason` {any}
+* Returns: A promise fulfilled with `undefined` once cancelation has
+  been completed.
+
+#### `readableStream.getReader([options])`
+
+
+* `options` {Object}
+  * `mode` {string} `'byob'` or `undefined`
+* Returns: {ReadableStreamDefaultReader|ReadableStreamBYOBReader}
+
+```mjs
+import { ReadableStream } from 'node:stream/web';
+
+const stream = new ReadableStream();
+
+const reader = stream.getReader();
+
+console.log(await reader.read());
+```
+
+```cjs
+const { ReadableStream } = require('stream/web');
+
+const stream = new ReadableStream();
+
+const reader = stream.getReader();
+
+reader.read().then(console.log);
+```
+
+Causes the `readableStream.locked` to be `true`.
+
+#### `readableStream.pipeThrough(transform[, options])`
+
+
+* `transform` {Object}
+  * `readable` {ReadableStream} The `ReadableStream` to which
+    `transform.writable` will push the potentially modified data
+    it receives from this `ReadableStream`.
+  * `writable` {WritableStream} The `WritableStream` to which this
+    `ReadableStream`'s data will be written.
+* `options` {Object}
+  * `preventAbort` {boolean} When `true`, errors in this `ReadableStream`
+    will not cause `transform.writable` to be aborted.
+  * `preventCancel` {boolean} When `true`, errors in the destination
+    `transform.writable` will not cause this `ReadableStream` to be
+    canceled.
+ * `preventClose` {boolean} When `true`, closing this `ReadableStream` + will no cause `transform.writable` to be closed. + * `signal` {AbortSignal} Allows the transfer of data to be canceled + using an {AbortController}. +* Returns: {ReadableStream} From `transform.readable`. + +Connects this {ReadableStream} to the pair of {ReadableStream} and +{WritableStream} provided in the `transform` argument such that the +data from this {ReadableStream} is written in to `transform.writable`, +possibly transformed, then pushed to `transform.readable`. Once the +pipeline is configured, `transform.readable` is returned. + +Causes the `readableStream.locked` to be `true` while the pipe operation +is active. + +```mjs +import { + ReadableStream, + TransformStream, +} from 'node:stream/web'; + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +for await (const chunk of transformedStream) + console.log(chunk); +``` + +```cjs +const { + ReadableStream, + TransformStream, +} = require('stream/web'); + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +(async () => { + for await (const chunk of transformedStream) + console.log(chunk); +})(); +``` + +#### `readableStream.pipeTo(destination, options)` + + +* `destination` {WritableStream} A {WritableStream} to which this + `ReadableStream`'s data will be written. +* `options` {Object} + * `preventAbort` {boolean} When `true`, errors in this `ReadableStream` + will not cause `transform.writable` to be aborted. 
+  * `preventCancel` {boolean} When `true`, errors in the destination
+    `transform.writable` will not cause this `ReadableStream` to be
+    canceled.
+  * `preventClose` {boolean} When `true`, closing this `ReadableStream`
+    will not cause `transform.writable` to be closed.
+  * `signal` {AbortSignal} Allows the transfer of data to be canceled
+    using an {AbortController}.
+* Returns: A promise fulfilled with `undefined`
+
+Causes the `readableStream.locked` to be `true` while the pipe operation
+is active.
+
+#### `readableStream.tee()`
+
+
+* Returns: {ReadableStream[]}
+
+Returns a pair of new {ReadableStream} instances to which this
+`ReadableStream`'s data will be forwarded. Each will receive the
+same data.
+
+Causes the `readableStream.locked` to be `true`.
+
+#### `readableStream.values([options])`
+
+
+* `options` {Object}
+  * `preventCancel` {boolean} When `true`, prevents the {ReadableStream}
+    from being closed when the async iterator abruptly terminates.
+    **Defaults**: `false`
+
+Creates and returns an async iterator usable for consuming this
+`ReadableStream`'s data.
+
+Causes the `readableStream.locked` to be `true` while the async iterator
+is active.
+
+```mjs
+import { Buffer } from 'node:buffer';
+
+const stream = new ReadableStream(getSomeSource());
+
+for await (const chunk of stream.values({ preventCancel: true }))
+  console.log(Buffer.from(chunk).toString());
+```
+
+#### Async Iteration
+
+The {ReadableStream} object supports the async iterator protocol using
+`for await` syntax.
+
+```mjs
+import { Buffer } from 'buffer';
+
+const stream = new ReadableStream(getSomeSource());
+
+for await (const chunk of stream)
+  console.log(Buffer.from(chunk).toString());
+```
+
+The async iterator will consume the {ReadableStream} until it terminates.
+
+By default, if the async iterator exits early (via either a `break`,
+`return`, or a `throw`), the {ReadableStream} will be closed. 
To prevent +automatic closing of the {ReadableStream}, use the `readableStream.values()` +method to acquire the async iterator and set the `preventCancel` option to +`true`. + +The {ReadableStream} must not be locked (that is, it must not have an existing +active reader). During the async iteration, the {ReadableStream} will be locked. + +#### Transfering with `postMessage()` + +A {ReadableStream} instance can be transferred using a {MessagePort}. + +```js +const stream = new ReadableStream(getReadableSourceSomehow()); + +const { port1, port2 } = new MessageChannel(); + +port1.onmessage = ({ data }) => { + data.getReader().read().then((chunk) => { + console.log(chunk); + }); +}; + +port2.postMessage(stream, [stream]); +``` + +### Class: `ReadableStreamDefaultReader` + + +By default, calling `readableStream.getReader()` with no arguments +will return an instance of `ReadableStreamDefaultReader`. The default +reader treats the chunks of data passed through the stream as opaque +values, which allows the {ReadableStream} to work with generally any +JavaScript value. + +#### `new ReadableStreamDefaultReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new {ReadableStreamDefaultReader} that is locked to the +given {ReadableStream}. + +#### `readableStreamDefaultReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Cancels the {ReadableStream} and returns a promise that is fulfilled +when the underlying stream has been canceled. + +#### `readableStreamDefaultReader.closed` + + +* Type: {Promise} Fulfilled with `undefined` when the associated + {ReadableStream} is closed or this reader's lock is released. + +#### `readableStreamDefaultReader.read()` + + +* Returns: A promise fulfilled with an object: + * `value` {ArrayBuffer} + * `done` {boolean} + +Requests the next chunk of data from the underlying {ReadableStream} +and returns a promise that is fulfilled with the data once it is +available. 
+ +#### `readableStreamDefaultReader.releaseLock()` + + +Releases this reader's lock on the underlying {ReadableStream}. + +### Class: `ReadableStreamBYOBReader` + + +The `ReadableStreamBYOBReader` is an alternative consumer for +byte-oriented {ReadableStream}'s (those that are created with +`underlyingSource.type` set equal to `'bytes`` when the +`ReadableStream` was created). + +The `BYOB` is short for "bring your own buffer". This is a +pattern that allows for more efficient reading of byte-oriented +data that avoids extraneous copying. + +```mjs +import { + open +} from 'node:fs/promises'; + +import { + ReadableStream +} from 'node:stream/web'; + +import { Buffer } from 'node:buffer'; + +class Source { + type = 'bytes'; + autoAllocateChunkSize = 1024; + + async start(controller) { + this.file = await open(new URL(import.meta.url)); + this.controller = controller; + } + + async pull(controller) { + const view = controller.byobRequest?.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + controller.byobRequest.respond(bytesRead); + } +} + +const stream = new ReadableStream(new Source()); + +async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); +} + +const data = await read(stream); +console.log(Buffer.from(data).toString()); +``` + +#### `new ReadableStreamBYOBReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new `ReadableStreamBYOBReader` that is locked to the +given {ReadableStream}. + +#### `readableStreamBYOBReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. 
+
+Cancels the {ReadableStream} and returns a promise that is fulfilled
+when the underlying stream has been canceled.
+
+#### `readableStreamBYOBReader.closed`
+
+
+* Type: {Promise} Fulfilled with `undefined` when the associated
+  {ReadableStream} is closed or this reader's lock is released.
+
+#### `readableStreamBYOBReader.read(view)`
+
+
+* `view` {Buffer|TypedArray|DataView}
+* Returns: A promise fulfilled with an object:
+  * `value` {ArrayBuffer}
+  * `done` {boolean}
+
+Requests the next chunk of data from the underlying {ReadableStream}
+and returns a promise that is fulfilled with the data once it is
+available.
+
+Do not pass a pooled {Buffer} object instance in to this method.
+Pooled `Buffer` objects are created using `Buffer.allocUnsafe()`,
+or `Buffer.from()`, or are often returned by various `fs` module
+callbacks. These types of `Buffer`s use a shared underlying
+{ArrayBuffer} object that contains all of the data from all of
+the pooled `Buffer` instances. When a `Buffer`, {TypedArray},
+or {DataView} is passed in to `readableStreamBYOBReader.read()`,
+the view's underlying `ArrayBuffer` is *detached*, invalidating
+all existing views that may exist on that `ArrayBuffer`. This
+can have disastrous consequences for your application.
+
+#### `readableStreamBYOBReader.releaseLock()`
+
+
+Releases this reader's lock on the underlying {ReadableStream}.
+
+### Class: `ReadableStreamDefaultController`
+
+
+Every {ReadableStream} has a controller that is responsible for
+the internal state and management of the stream's queue. The
+`ReadableStreamDefaultController` is the default controller
+implementation for `ReadableStream`s that are not byte-oriented.
+
+#### `readableStreamDefaultController.close()`
+
+
+Closes the {ReadableStream} to which this controller is associated.
+
+#### `readableStreamDefaultController.desiredSize`
+
+
+* Type: {number}
+
+Returns the amount of data remaining to fill the {ReadableStream}'s
+queue.
+ +#### `readableStreamDefaultController.enqueue(chunk)` + + +* `chunk` {any} + +Appends a new chunk of data to the {ReadableStream}'s queue. + +#### `readableStreamDefaultController.error(error)` + + +* `error` {any} + +Signals an error that causes the {ReadableStream} to error and close. + +### Class: `ReadableByteStreamController` + + +Every {ReadableStream} has a controller that is responsible for +the internal state and management of the stream's queue. The +`ReadableByteStreamController` is for byte-oriented `ReadableStream`s. + +#### `readableByteStreamController.byobRequest` + + +* Type: {ReadableStreamBYOBRequest} + +#### `readableByteStreamController.close()` + + +Closes the {ReadableStream} to which this controller is associated. + +#### `readableByteStreamController.desiredSize` + + +* Type: {number} + +Returns the amount of data remaining to fill the {ReadableStream}'s +queue. + +#### `readableByteStreamController.enqueue(chunk)` + + +* `chunk`: {Buffer|TypedArray|DataView} + +Appends a new chunk of data to the {ReadableStream}'s queue. + +#### `readableByteStreamController.error(error)` + + +* `error` {any} + +Signals an error that causes the {ReadableStream} to error and close. + +### Class: `ReadableStreamBYOBRequest` + + +When using `ReadableByteStreamController` in byte-oriented +streams, and when using the `ReadableStreamBYOBReader`, +the `readableByteStreamController.byobRequest` property +provides access to a `ReadableStreamBYOBRequest` instance +that represents the current read request. The object +is used to gain access to the `ArrayBuffer`/`TypedArray` +that has been provided for the read request to fill, +and provides methods for signaling that the data has +been provided. + +#### `readableStreamBYOBRequest.respond(bytesWritten)` + + +* `bytesWritten` {number} + +Signals that a `bytesWritten` number of bytes have been written +to `readableStreamBYOBRequest.view`. 
+ +#### `readableStreamBYOBRequest.respondWithNewView(view)` + + +* `view` {Buffer|TypedArray|DataView} + +Signals that the request has been fulfilled with bytes written +to a new `Buffer`, `TypedArray`, or `DataView`. + +#### `readableStreamBYOBRequest.view` + + +* Type: {Buffer|TypedArray|DataView} + +### Class: `WritableStream` + + +The `WritableStream` is a destination to which stream data is sent. + +```mjs +import { + WritableStream +} from 'node:stream/web'; + +const stream = new WritableStream({ + write(chunk) { + console.log(chunk); + } +}); + +await stream.getWriter().write('Hello World'); +``` + +#### `new WritableStream([underlyingSink[, strategy]])` + + +* `underlyingSink` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `WritableStream` is created. + * `controller` {WritableStreamDefaultController} + * Returns: `undefined` or a promise fulfilled with `undefined`. + * `write` {Function} A user-defined function that is invoked when a chunk of + data has been written to the `WritableStream`. + * `chunk` {any} + * `controller` {WritableStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `close` {Function} A user-defined function that is called when the + `WritableStream` is closed. + * Returns: A promise fulfilled with `undefined`. + * `abort` {Function} A user-defined function that is called to abruptly close + the `WritableStream`. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. + * `type` {any} The `type` option is reserved for future use and *must* be + undefined. +* `strategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + +#### `writableStream.abort([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. 
+
+Abruptly terminates the `WritableStream`. All queued writes will be
+canceled with their associated promises rejected.
+
+#### `writableStream.close()`
+
+
+* Returns: A promise fulfilled with `undefined`.
+
+Closes the `WritableStream` when no additional writes are expected.
+
+#### `writableStream.getWriter()`
+
+
+* Returns: {WritableStreamDefaultWriter}
+
+Creates and returns a new writer instance that can be used to write
+data into the `WritableStream`.
+
+#### `writableStream.locked`
+
+
+* Type: {boolean}
+
+The `writableStream.locked` property is `false` by default, and is
+switched to `true` while there is an active writer attached to this
+`WritableStream`.
+
+#### Transferring with postMessage()
+
+A {WritableStream} instance can be transferred using a {MessagePort}.
+
+```js
+const stream = new WritableStream(getWritableSinkSomehow());
+
+const { port1, port2 } = new MessageChannel();
+
+port1.onmessage = ({ data }) => {
+  data.getWriter().write('hello');
+};
+
+port2.postMessage(stream, [stream]);
+```
+
+### Class: `WritableStreamDefaultWriter`
+
+
+#### `new WritableStreamDefaultWriter(stream)`
+
+
+* `stream` {WritableStream}
+
+Creates a new `WritableStreamDefaultWriter` that is locked to the given
+`WritableStream`.
+
+#### `writableStreamDefaultWriter.abort([reason])`
+
+
+* `reason` {any}
+* Returns: A promise fulfilled with `undefined`.
+
+Abruptly terminates the `WritableStream`. All queued writes will be
+canceled with their associated promises rejected.
+
+#### `writableStreamDefaultWriter.close()`
+
+
+* Returns: A promise fulfilled with `undefined`.
+
+Closes the `WritableStream` when no additional writes are expected.
+
+#### `writableStreamDefaultWriter.closed`
+
+
+* Type: A promise that is fulfilled with `undefined` when the
+  associated {WritableStream} is closed or this writer's lock is
+  released.
+
+#### `writableStreamDefaultWriter.desiredSize`
+
+
+* Type: {number}
+
+The amount of data required to fill the {WritableStream}'s queue.
+
+#### `writableStreamDefaultWriter.ready`
+
+
+* Type: A promise that is fulfilled with `undefined` when the
+  writer is ready to be used.
+
+#### `writableStreamDefaultWriter.releaseLock()`
+
+
+Releases this writer's lock on the underlying {WritableStream}.
+
+#### `writableStreamDefaultWriter.write([chunk])`
+
+
+* `chunk`: {any}
+* Returns: A promise fulfilled with `undefined`.
+
+Appends a new chunk of data to the {WritableStream}'s queue.
+
+### Class: `WritableStreamDefaultController`
+
+
+The `WritableStreamDefaultController` manages the {WritableStream}'s
+internal state.
+
+#### `writableStreamDefaultController.abortReason`
+
+* Type: {any} The `reason` value passed to `writableStream.abort()`.
+
+#### `writableStreamDefaultController.error(error)`
+
+
+* `error` {any}
+
+Called by user-code to signal that an error has occurred while processing
+the `WritableStream` data. When called, the {WritableStream} will be aborted,
+with currently pending writes canceled.
+
+#### `writableStreamDefaultController.signal`
+
+* Type: {AbortSignal} An `AbortSignal` that can be used to cancel pending
+  write or close operations when a {WritableStream} is aborted.
+
+### Class: `TransformStream`
+
+
+A `TransformStream` consists of a {ReadableStream} and a {WritableStream} that
+are connected such that the data written to the `WritableStream` is received,
+and potentially transformed, before being pushed into the `ReadableStream`'s
+queue.
+
+```mjs
+import {
+  TransformStream
+} from 'node:stream/web';
+
+const transform = new TransformStream({
+  transform(chunk, controller) {
+    controller.enqueue(chunk.toUpperCase());
+  }
+});
+
+await Promise.all([
+  transform.writable.getWriter().write('A'),
+  transform.readable.getReader().read(),
+]);
+```
+
+#### `new TransformStream([transformer[, writableStrategy[, readableStrategy]]])`
+
+
+* `transformer` {Object}
+  * `start` {Function} A user-defined function that is invoked immediately when
+    the `TransformStream` is created.
+    * `controller` {TransformStreamDefaultController}
+    * Returns: `undefined` or a promise fulfilled with `undefined`
+  * `transform` {Function} A user-defined function that receives, and
+    potentially modifies, a chunk of data written to `transformStream.writable`,
+    before forwarding that on to `transformStream.readable`.
+    * `chunk` {any}
+    * `controller` {TransformStreamDefaultController}
+    * Returns: A promise fulfilled with `undefined`.
+  * `flush` {Function} A user-defined function that is called immediately before
+    the writable side of the `TransformStream` is closed, signaling the end of
+    the transformation process.
+    * `controller` {TransformStreamDefaultController}
+    * Returns: A promise fulfilled with `undefined`.
+  * `readableType` {any} the `readableType` option is reserved for future use
+    and *must* be `undefined`.
+  * `writableType` {any} the `writableType` option is reserved for future use
+    and *must* be `undefined`.
+* `writableStrategy` {Object}
+  * `highWaterMark` {number} The maximum internal queue size before backpressure
+    is applied.
+  * `size` {Function} A user-defined function used to identify the size of each
+    chunk of data.
+    * `chunk` {any}
+    * Returns: {number}
+* `readableStrategy` {Object}
+  * `highWaterMark` {number} The maximum internal queue size before backpressure
+    is applied.
+  * `size` {Function} A user-defined function used to identify the size of each
+    chunk of data.
+    * `chunk` {any}
+    * Returns: {number}
+
+#### `transformStream.readable`
+
+
+* Type: {ReadableStream}
+
+#### `transformStream.writable`
+
+
+* Type: {WritableStream}
+
+#### Transferring with postMessage()
+
+A {TransformStream} instance can be transferred using a {MessagePort}.
+
+```js
+const stream = new TransformStream();
+
+const { port1, port2 } = new MessageChannel();
+
+port1.onmessage = ({ data }) => {
+  const { writable, readable } = data;
+  // ...
+};
+
+port2.postMessage(stream, [stream]);
+```
+
+### Class: `TransformStreamDefaultController`
+
+
+The `TransformStreamDefaultController` manages the internal state
+of the `TransformStream`.
+
+#### `transformStreamDefaultController.desiredSize`
+
+
+* Type: {number}
+
+The amount of data required to fill the readable side's queue.
+
+#### `transformStreamDefaultController.enqueue([chunk])`
+
+
+* `chunk` {any}
+
+Appends a chunk of data to the readable side's queue.
+
+#### `transformStreamDefaultController.error([reason])`
+
+
+* `reason` {any}
+
+Signals to both the readable and writable side that an error has occurred
+while processing the transform data, causing both sides to be abruptly
+closed.
+
+#### `transformStreamDefaultController.terminate()`
+
+
+Closes the readable side of the transport and causes the writable side
+to be abruptly closed with an error.
+ +### Class: `ByteLengthQueuingStrategy` + + +#### `new ByteLengthQueuingStrategy(options)` + + +* `options` {Object} + * `highWaterMark` {number} + +#### `byteLengthQueuingStrategy.highWaterMark` + + +* Type: {number} + +#### `byteLengthQueuingStrategy.size` + + +* Type: {Function} + * `chunk` {any} + * Returns: {number} + +### Class: `CountQueuingStrategy` + + +#### `new CountQueuingStrategy(options)` + + +* `options` {Object} + * `highWaterMark` {number} + +#### `countQueuingStrategy.highWaterMark` + + +* Type: {number} + +#### `countQueuingStrategy.size` + + +* Type: {Function} + * `chunk` {any} + * Returns: {number} + +[Streams]: stream.md +[WHATWG Streams Standard]: https://streams.spec.whatwg.org/ diff --git a/lib/internal/abort_controller.js b/lib/internal/abort_controller.js index 6c80aa7bf4f2b3..e6ee07052617d5 100644 --- a/lib/internal/abort_controller.js +++ b/lib/internal/abort_controller.js @@ -143,6 +143,7 @@ ObjectDefineProperty(AbortController.prototype, SymbolToStringTag, { }); module.exports = { + kAborted, AbortController, AbortSignal, }; diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 13b56311d370b8..ed3fa3787e5eec 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -1033,6 +1033,7 @@ E('ERR_HTTP_SOCKET_ENCODING', 'Changing the socket encoding is not allowed per RFC7230 Section 3.', Error); E('ERR_HTTP_TRAILER_INVALID', 'Trailers are invalid with this transfer encoding', Error); +E('ERR_ILLEGAL_CONSTRUCTOR', 'Illegal constructor', TypeError); E('ERR_INCOMPATIBLE_OPTION_PAIR', 'Option "%s" cannot be used in combination with option "%s"', TypeError); E('ERR_INPUT_TYPE_NOT_ALLOWED', '--input-type can only be used with string ' + @@ -1256,8 +1257,8 @@ E('ERR_INVALID_RETURN_VALUE', (input, name, value) => { } return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`; -}, TypeError); -E('ERR_INVALID_STATE', 'Invalid state: %s', Error); +}, TypeError, RangeError); 
+E('ERR_INVALID_STATE', 'Invalid state: %s', Error, TypeError, RangeError); E('ERR_INVALID_SYNC_FORK_INPUT', 'Asynchronous forks do not support ' + 'Buffer, TypedArray, DataView or string input: %s', @@ -1361,7 +1362,7 @@ E('ERR_NO_CRYPTO', 'Node.js is not compiled with OpenSSL crypto support', Error); E('ERR_NO_ICU', '%s is not supported on Node.js compiled without ICU', TypeError); -E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error); +E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error, TypeError); E('ERR_OUT_OF_RANGE', (str, range, input, replaceDefaultBoolean = false) => { assert(range, 'Missing "range" argument'); diff --git a/lib/internal/per_context/primordials.js b/lib/internal/per_context/primordials.js index 42250ffb422d6e..4dfb4dea85ef2a 100644 --- a/lib/internal/per_context/primordials.js +++ b/lib/internal/per_context/primordials.js @@ -415,5 +415,10 @@ primordials.SafePromisePrototypeFinally = (thisPromise, onFinally) => .then(a, b) ); +primordials.AsyncIteratorPrototype = + primordials.ReflectGetPrototypeOf( + primordials.ReflectGetPrototypeOf( + async function* () {}).prototype); + ObjectSetPrototypeOf(primordials, null); ObjectFreeze(primordials); diff --git a/lib/internal/webstreams/queuingstrategies.js b/lib/internal/webstreams/queuingstrategies.js new file mode 100644 index 00000000000000..d8750665bd5e86 --- /dev/null +++ b/lib/internal/webstreams/queuingstrategies.js @@ -0,0 +1,168 @@ +'use strict'; + +const { + ObjectDefineProperties, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_INVALID_THIS, + ERR_MISSING_OPTION, + }, +} = require('internal/errors'); + +const { + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + customInspect, + isBrandCheck, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + validateObject, +} = require('internal/validators'); + +const isByteLengthQueuingStrategy = + isBrandCheck('ByteLengthQueuingStrategy'); + +const isCountQueuingStrategy = + 
isBrandCheck('CountQueuingStrategy'); + +/** + * @callback QueuingStrategySize + * @param {any} chunk + * @returns {number} + * + * @typedef {{ + * highWaterMark : number, + * size? : QueuingStrategySize, + * }} QueuingStrategy + */ + +// eslint-disable-next-line func-name-matching,func-style +const byteSizeFunction = function size(chunk) { return chunk.byteLength; }; + +// eslint-disable-next-line func-name-matching,func-style +const countSizeFunction = function size() { return 1; }; + +/** + * @type {QueuingStrategy} + */ +class ByteLengthQueuingStrategy { + [kType] = 'ByteLengthQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. + this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return byteSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(ByteLengthQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +/** + * @type {QueuingStrategy} + */ +class CountQueuingStrategy { + [kType] = 'CountQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 
'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. + this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return countSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(CountQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +module.exports = { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js new file mode 100644 index 00000000000000..a8024c64af2353 --- /dev/null +++ b/lib/internal/webstreams/readablestream.js @@ -0,0 +1,2740 @@ +'use strict'; + +/* eslint-disable no-use-before-define */ + +const { + ArrayBuffer, + ArrayBufferPrototypeSlice, + ArrayPrototypePush, + ArrayPrototypeShift, + DataViewCtor, + FunctionPrototypeBind, + FunctionPrototypeCall, + MathMin, + NumberIsInteger, + ObjectCreate, + ObjectDefineProperties, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeCatch, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + PromiseAll, + ReflectConstruct, + Symbol, + SymbolAsyncIterator, + SymbolToStringTag, + Uint8Array, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = 
internalBinding('messaging'); + +const { + isArrayBufferView, + isDataView, +} = require('util/types'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + serialize, + deserialize, +} = require('v8'); + +const { + validateObject, +} = require('internal/validators'); + +const { + kAborted, +} = require('internal/abort_controller'); + +const { + MessageChannel, +} = require('internal/worker/io'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + queueMicrotask, +} = require('internal/process/task_queues'); + +const { + ArrayBufferViewGetBuffer, + ArrayBufferViewGetByteLength, + ArrayBufferViewGetByteOffset, + ArrayBufferGetByteLength, + AsyncIterator, + copyArrayBuffer, + customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + resetQueue, + setPromiseHandled, + transferArrayBuffer, + nonOpCancel, + nonOpPull, + nonOpStart, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + WritableStreamDefaultWriter, + + isWritableStream, + isWritableStreamLocked, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + writableStreamAbort, + writableStreamCloseQueuedOrInFlight, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterWrite, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +const kCancel = Symbol('kCancel'); +const kClose = Symbol('kClose'); +const kChunk = Symbol('kChunk'); +const kError = Symbol('kError'); +const kPull = Symbol('kPull'); + +/** + * @typedef {import('../abort_controller').AbortSignal} AbortSignal + * @typedef {import('./queuingstrategies').QueuingStrategy} QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } 
QueuingStrategySize + * @typedef {import('./writablestream').WritableStream} WritableStream + * + * @typedef {ReadableStreamDefaultController | ReadableByteStreamController + * } ReadableStreamController + * + * @typedef {ReadableStreamDefaultReader | ReadableStreamBYOBReader + * } ReadableStreamReader + * + * @callback UnderlyingSourceStartCallback + * @param {ReadableStreamController} controller + * @returns { any | Promise } + * + * @callback UnderlyingSourcePullCallback + * @param {ReadableStreamController} controller + * @returns { Promise } + * + * @callback UnderlyingSourceCancelCallback + * @param {any} reason + * @returns { Promise } + * + * @typedef {{ + * readable: ReadableStream, + * writable: WritableStream, + * }} ReadableWritablePair + * + * @typedef {{ + * preventClose? : boolean, + * preventAbort? : boolean, + * preventCancel? : boolean, + * signal? : AbortSignal, + * }} StreamPipeOptions + * + * @typedef {{ + * start? : UnderlyingSourceStartCallback, + * pull? : UnderlyingSourcePullCallback, + * cancel? : UnderlyingSourceCancelCallback, + * type? : "bytes", + * autoAllocateChunkSize? : number + * }} UnderlyingSource + * + */ + +class ReadableStream { + [kType] = 'ReadableStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {UnderlyingSource} [source] + * @param {QueuingStrategy} [strategy] + */ + constructor(source = {}, strategy = {}) { + if (source === null) + throw new ERR_INVALID_ARG_VALUE('source', 'Object', source); + this[kState] = { + disturbed: false, + state: 'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port1: undefined, + port2: undefined, + promise: undefined, + } + }; + // The spec requires handling of the strategy first + // here. Specifically, if getting the size and + // highWaterMark from the strategy fail, that has + // to trigger a throw before getting the details + // from the source. So be sure to keep these in + // this order. 
+ const size = strategy?.size; + const highWaterMark = strategy?.highWaterMark; + const type = source.type; + + if (`${type}` === 'bytes') { + if (size !== undefined) + throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.size', size); + setupReadableByteStreamControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 0)); + return; + } + + if (type !== undefined) + throw new ERR_INVALID_ARG_VALUE('source.type', type); + setupReadableStreamDefaultControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 1), + extractSizeAlgorithm(size)); + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {boolean} + */ + get locked() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return isReadableStreamLocked(this); + } + + /** + * @param {any} [reason] + * @returns { Promise } + */ + cancel(reason = undefined) { + if (!isReadableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStream')); + if (isReadableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('ReadableStream is locked')); + } + return readableStreamCancel(this, reason); + } + + /** + * @param {{ + * mode? 
: "byob" + * }} [options] + * @returns {ReadableStreamReader} + */ + getReader(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options', { nullable: true, allowFunction: true }); + const mode = options?.mode; + + if (mode === undefined) + return new ReadableStreamDefaultReader(this); + + if (`${mode}` !== 'byob') + throw new ERR_INVALID_ARG_VALUE('options.mode', mode); + return new ReadableStreamBYOBReader(this); + } + + /** + * @param {ReadableWritablePair} transform + * @param {StreamPipeOptions} [options] + * @returns {ReadableStream} + */ + pipeThrough(transform, options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + const readable = transform?.readable; + if (!isReadableStream(readable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.readable', + 'ReadableStream', + readable); + } + const writable = transform?.writable; + if (!isWritableStream(writable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + writable); + } + + // The web platform tests require that these be handled one at a + // time and in a specific order. options can be null or undefined. 
+ const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(writable)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + const promise = readableStreamPipeTo( + this, + writable, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + setPromiseHandled(promise); + + return readable; + } + + /** + * @param {WritableStream} destination + * @param {StreamPipeOptions} [options] + * @returns {Promise} + */ + pipeTo(destination, options = {}) { + try { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (!isWritableStream(destination)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + destination); + } + + const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(destination)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + return readableStreamPipeTo( + this, + destination, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + } catch (error) { + return PromiseReject(error); + } + } + + /** + * @returns {ReadableStream[]} + */ + tee() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return readableStreamTee(this, 
false); + } + + /** + * @param {{ + * preventCancel? : boolean, + * }} [options] + * @returns {AsyncIterable} + */ + values(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options'); + const { + preventCancel = false, + } = options; + + const reader = new ReadableStreamDefaultReader(this); + let done = false; + let started = false; + let current; + + // The nextSteps function is not an async function in order + // to make it more efficient. Because nextSteps explicitly + // creates a Promise and returns it in the common case, + // making it an async function just causes two additional + // unnecessary Promise allocations to occur, which just add + // cost. + function nextSteps() { + if (done) + return PromiseResolve({ done: true, value: undefined }); + + if (reader[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream')); + } + const promise = createDeferredPromise(); + + readableStreamDefaultReaderRead(reader, { + [kChunk](chunk) { + current = undefined; + promise.resolve({ value: chunk, done: false }); + }, + [kClose]() { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.resolve({ done: true, value: undefined }); + }, + [kError](error) { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.reject(error); + } + }); + return promise.promise; + } + + async function returnSteps(value) { + if (done) + return { done: true, value }; + done = true; + + if (reader[kState].stream === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream'); + } + assert(!reader[kState].readRequests.length); + if (!preventCancel) { + const result = readableStreamReaderGenericCancel(reader, value); + readableStreamReaderGenericRelease(reader); + await result; + return { done: true, value }; + } + + 
readableStreamReaderGenericRelease(reader);
+      return { done: true, value };
+    }
+
+    // TODO(@jasnell): Explore whether an async generator
+    // can be used here instead of a custom iterator object.
+    return ObjectSetPrototypeOf({
+      // Changing either of these functions (next or return)
+      // to async functions causes a failure in the streams
+      // Web Platform Tests that check for use of a modified
+      // Promise.prototype.then. Since the await keyword
+      // uses Promise.prototype.then, it is open to prototype
+      // pollution, which causes the test to fail. The other
+      // await uses here do not trigger that failure because
+      // the test that fails does not trigger those code paths.
+      next() {
+        // If this is the first read, delay by one microtask
+        // to ensure that the controller has had an opportunity
+        // to properly start and perform the initial pull.
+        // TODO(@jasnell): The spec doesn't call this out so
+        // need to investigate if it's a bug in our impl or
+        // the spec.
+        if (!started) {
+          current = PromiseResolve();
+          started = true;
+        }
+        current = current !== undefined ?
+          PromisePrototypeThen(current, nextSteps, nextSteps) :
+          nextSteps();
+        return current;
+      },
+
+      return(error) {
+        return current ?
+ PromisePrototypeThen( + current, + () => returnSteps(error), + () => returnSteps(error)) : + returnSteps(error); + }, + + [SymbolAsyncIterator]() { return this; } + }, AsyncIterator); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + locked: this.locked, + state: this[kState].state, + }); + } + + [kTransfer]() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (this.locked) { + this[kState].transfer.port1?.close(); + this[kState].transfer.port1 = undefined; + this[kState].transfer.port2 = undefined; + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + + const { + writable, + promise, + } = lazyTransfer().newCrossRealmWritableSink( + this, + this[kState].transfer.port1); + + this[kState].transfer.writable = writable; + this[kState].transfer.promise = promise; + + return { + data: { port: this[kState].transfer.port2 }, + deserializeInfo: + 'internal/webstreams/readablestream:TransferedReadableStream' + }; + } + + [kTransferList]() { + const { port1, port2 } = new MessageChannel(); + this[kState].transfer.port1 = port1; + this[kState].transfer.port2 = port2; + return [ port2 ]; + } + + [kDeserialize]({ port }) { + const transfer = lazyTransfer(); + setupReadableStreamDefaultControllerFromSource( + this, + new transfer.CrossRealmTransformReadableSource(port), + 0, () => 1); + } +} + +ObjectDefineProperties(ReadableStream.prototype, { + [SymbolAsyncIterator]: { + configurable: true, + enumerable: false, + writable: true, + value: ReadableStream.prototype.values, + }, + locked: { enumerable: true }, + cancel: { enumerable: true }, + getReader: { enumerable: true }, + pipeThrough: { enumerable: true }, + pipeTo: { enumerable: true }, + tee: { enumerable: true }, +}); + +function TransferedReadableStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'ReadableStream'; + this[kState] = { + disturbed: false, + state: 
'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port: undefined, + promise: undefined, + } + }; + }, + [], ReadableStream)); +} +TransferedReadableStream.prototype[kDeserialize] = () => {}; + +class ReadableStreamBYOBRequest { + [kType] = 'ReadableStreamBYOBRequest'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ArrayBufferView} + */ + get view() { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + return this[kState].view; + } + + /** + * @param {number} bytesWritten + */ + respond(bytesWritten) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + view, + controller, + } = this[kState]; + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer); + + if (viewByteLength === 0 || viewBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'View ArrayBuffer is zero-length or detached'); + } + + readableByteStreamControllerRespond(controller, bytesWritten); + } + + /** + * @param {ArrayBufferView} view + */ + respondWithNewView(view) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + controller, + } = this[kState]; + + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + readableByteStreamControllerRespondWithNewView(controller, view); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + view: this.view, + controller: this[kState].controller, + }); + } +} + 
+ObjectDefineProperties(ReadableStreamBYOBRequest.prototype, { + view: { enumerable: true }, + respond: { enumerable: true }, + respondWithNewView: { enumerable: true }, +}); + +function createReadableStreamBYOBRequest(controller, view) { + return ReflectConstruct( + function() { + this[kType] = 'ReadableStreamBYOBRequest'; + this[kState] = { + controller, + view, + }; + }, + [], + ReadableStreamBYOBRequest + ); +} + +class DefaultReadRequest { + constructor() { + this[kState] = createDeferredPromise(); + } + + [kChunk](value) { + this[kState].resolve?.({ value, done: false }); + } + + [kClose]() { + this[kState].resolve?.({ value: undefined, done: true }); + } + + [kError](error) { + this[kState].reject?.(error); + } + + get promise() { return this[kState].promise; } +} + +class ReadIntoRequest { + constructor() { + this[kState] = createDeferredPromise(); + } + + [kChunk](value) { + this[kState].resolve?.({ value, done: false }); + } + + [kClose](value) { + this[kState].resolve?.({ value, done: true }); + } + + [kError](error) { + this[kState].reject?.(error); + } + + get promise() { return this[kState].promise; } +} + +class ReadableStreamDefaultReader { + [kType] = 'ReadableStreamDefaultReader'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {ReadableStream} stream + */ + constructor(stream) { + if (!isReadableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream); + this[kState] = { + readRequests: [], + stream: undefined, + close: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + }; + setupReadableStreamDefaultReader(this, stream); + } + + /** + * @returns {Promise<{ + * value : any, + * done : boolean + * }>} + */ + read() { + if (!isReadableStreamDefaultReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a 
stream'));
+    }
+    const readRequest = new DefaultReadRequest();
+    readableStreamDefaultReaderRead(this, readRequest);
+    return readRequest.promise;
+  }
+
+  releaseLock() {
+    if (!isReadableStreamDefaultReader(this))
+      throw new ERR_INVALID_THIS('ReadableStreamDefaultReader');
+    if (this[kState].stream === undefined)
+      return;
+    if (this[kState].readRequests.length) {
+      throw new ERR_INVALID_STATE.TypeError(
+        'Cannot release with pending read requests');
+    }
+    readableStreamReaderGenericRelease(this);
+  }
+
+  /**
+   * @readonly
+   * @type {Promise}
+   */
+  get closed() {
+    if (!isReadableStreamDefaultReader(this))
+      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader'));
+    return this[kState].close.promise;
+  }
+
+  /**
+   * @param {any} reason
+   * @returns {Promise}
+   */
+  cancel(reason = undefined) {
+    if (!isReadableStreamDefaultReader(this))
+      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader'));
+    if (this[kState].stream === undefined) {
+      return PromiseReject(new ERR_INVALID_STATE.TypeError(
+        'The reader is not attached to a stream'));
+    }
+    return readableStreamReaderGenericCancel(this, reason);
+  }
+
+  [kInspect](depth, options) {
+    return customInspect(depth, options, this[kType], {
+      stream: this[kState].stream,
+      readRequests: this[kState].readRequests.length,
+      close: this[kState].close.promise,
+    });
+  }
+}
+
+ObjectDefineProperties(ReadableStreamDefaultReader.prototype, {
+  closed: { enumerable: true },
+  read: { enumerable: true },
+  releaseLock: { enumerable: true },
+  cancel: { enumerable: true },
+});
+
+class ReadableStreamBYOBReader {
+  [kType] = 'ReadableStreamBYOBReader';
+
+  get [SymbolToStringTag]() { return this[kType]; }
+
+  /**
+   * @param {ReadableStream} stream
+   */
+  constructor(stream) {
+    if (!isReadableStream(stream))
+      throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream);
+    this[kState] = {
+      stream: undefined,
+      readIntoRequests: [],
+      close: {
+        promise: undefined,
+        resolve: undefined,
+        reject: undefined,
+      },
+    };
+    setupReadableStreamBYOBReader(this, stream);
+  }
+
+  /**
+   * @param {ArrayBufferView} view
+   * @returns {Promise<{
+   *   view : ArrayBufferView,
+   *   done : boolean,
+   * }>}
+   */
+  read(view) {
+    if (!isReadableStreamBYOBReader(this))
+      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader'));
+    if (!isArrayBufferView(view)) {
+      return PromiseReject(
+        new ERR_INVALID_ARG_TYPE(
+          'view',
+          [
+            'Buffer',
+            'TypedArray',
+            'DataView',
+          ],
+          view));
+    }
+    const viewByteLength = ArrayBufferViewGetByteLength(view);
+    const viewBuffer = ArrayBufferViewGetBuffer(view);
+    const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer);
+
+    if (viewByteLength === 0 || viewBufferByteLength === 0) {
+      return PromiseReject(
+        new ERR_INVALID_STATE.TypeError(
+          'View ArrayBuffer is zero-length or detached'));
+    }
+    // Supposed to assert here that the view's buffer is not
+    // detached, but there's no API available to use to check that.
+    if (this[kState].stream === undefined) {
+      return PromiseReject(
+        new ERR_INVALID_STATE.TypeError(
+          'The reader is not attached to a stream'));
+    }
+    const readIntoRequest = new ReadIntoRequest();
+    readableStreamBYOBReaderRead(this, view, readIntoRequest);
+    return readIntoRequest.promise;
+  }
+
+  releaseLock() {
+    if (!isReadableStreamBYOBReader(this))
+      throw new ERR_INVALID_THIS('ReadableStreamBYOBReader');
+    if (this[kState].stream === undefined)
+      return;
+    if (this[kState].readIntoRequests.length) {
+      throw new ERR_INVALID_STATE.TypeError(
+        'Cannot release with pending read requests');
+    }
+    readableStreamReaderGenericRelease(this);
+  }
+
+  /**
+   * @readonly
+   * @type {Promise}
+   */
+  get closed() {
+    if (!isReadableStreamBYOBReader(this))
+      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader'));
+    return this[kState].close.promise;
+  }
+
+  /**
+   * @param {any} reason
+   * @returns {Promise}
+   */
+  cancel(reason = undefined) {
+    if (!isReadableStreamBYOBReader(this))
+      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader'));
+    if (this[kState].stream === undefined) {
+      return PromiseReject(new ERR_INVALID_STATE.TypeError(
+        'The reader is not attached to a stream'));
+    }
+    return readableStreamReaderGenericCancel(this, reason);
+  }
+
+  [kInspect](depth, options) {
+    return customInspect(depth, options, this[kType], {
+      stream: this[kState].stream,
+      readIntoRequests: this[kState].readIntoRequests.length,
+      close: this[kState].close.promise,
+    });
+  }
+}
+
+ObjectDefineProperties(ReadableStreamBYOBReader.prototype, {
+  closed: { enumerable: true },
+  read: { enumerable: true },
+  releaseLock: { enumerable: true },
+  cancel: { enumerable: true },
+});
+
+class ReadableStreamDefaultController {
+  [kType] = 'ReadableStreamDefaultController';
+
+  get [SymbolToStringTag]() { return this[kType]; }
+
+  constructor() {
+    throw new ERR_ILLEGAL_CONSTRUCTOR();
+  }
+
+  /**
+   * @readonly
+   * @type {number}
+   */
+  get desiredSize() {
+    return readableStreamDefaultControllerGetDesiredSize(this);
+  }
+
+  close() {
+    if (!readableStreamDefaultControllerCanCloseOrEnqueue(this))
+      throw new ERR_INVALID_STATE.TypeError('Controller is already closed');
+    readableStreamDefaultControllerClose(this);
+  }
+
+  /**
+   * @param {any} chunk
+   */
+  enqueue(chunk = undefined) {
+    if (!readableStreamDefaultControllerCanCloseOrEnqueue(this))
+      throw new ERR_INVALID_STATE.TypeError('Controller is already closed');
+    readableStreamDefaultControllerEnqueue(this, chunk);
+  }
+
+  /**
+   * @param {any} error
+   */
+  error(error = undefined) {
+    readableStreamDefaultControllerError(this, error);
+  }
+
+  [kCancel](reason) {
+    return readableStreamDefaultControllerCancelSteps(this, reason);
+  }
+
+  [kPull](readRequest) {
+    readableStreamDefaultControllerPullSteps(this, readRequest);
+  }
+
+  [kInspect](depth, options) {
+    return customInspect(depth, options, this[kType], { });
+  }
+}
+
+ObjectDefineProperties(ReadableStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableStreamDefaultController'; + this[kState] = {}; + }, + [], + ReadableStreamDefaultController, + ); +} + +class ReadableByteStreamController { + [kType] = 'ReadableByteStreamController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ReadableStreamBYOBRequest} + */ + get byobRequest() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].byobRequest === null && + this[kState].pendingPullIntos.length) { + const { + buffer, + byteOffset, + bytesFilled, + byteLength, + } = this[kState].pendingPullIntos[0]; + const view = + new Uint8Array( + buffer, + byteOffset + bytesFilled, + byteLength - bytesFilled); + this[kState].byobRequest = createReadableStreamBYOBRequest(this, view); + } + return this[kState].byobRequest; + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + return readableByteStreamControllerGetDesiredSize(this); + } + + close() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerClose(this); + } + + /** + * @param {ArrayBufferView} chunk + */ + enqueue(chunk) { + if (!isReadableByteStreamController(this)) + throw new 
ERR_INVALID_THIS('ReadableByteStreamController'); + if (!isArrayBufferView(chunk)) { + throw new ERR_INVALID_ARG_TYPE( + 'chunk', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + chunk); + } + const chunkByteLength = ArrayBufferViewGetByteLength(chunk); + const chunkByteOffset = ArrayBufferViewGetByteOffset(chunk); + const chunkBuffer = ArrayBufferViewGetBuffer(chunk); + const chunkBufferByteLength = ArrayBufferGetByteLength(chunkBuffer); + if (chunkByteLength === 0 || chunkBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'chunk ArrayBuffer is zero-length or detached'); + } + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerEnqueue( + this, + chunkBuffer, + chunkByteLength, + chunkByteOffset); + } + + /** + * @param {any} error + */ + error(error = undefined) { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + readableByteStreamControllerError(this, error); + } + + [kCancel](reason) { + return readableByteStreamControllerCancelSteps(this, reason); + } + + [kPull](readRequest) { + readableByteStreamControllerPullSteps(this, readRequest); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { }); + } +} + +ObjectDefineProperties(ReadableByteStreamController.prototype, { + byobRequest: { enumerable: true }, + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableByteStreamController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableByteStreamController'; + this[kState] = {}; + }, + [], + ReadableByteStreamController, + ); +} + +function createTeeReadableStream(start, pull, cancel) { + return ReflectConstruct( 
+    function() {
+      this[kType] = 'ReadableStream';
+      this[kState] = {
+        disturbed: false,
+        state: 'readable',
+        storedError: undefined,
+        stream: undefined,
+        transfer: {
+          writable: undefined,
+          port: undefined,
+          promise: undefined,
+        }
+      };
+      setupReadableStreamDefaultControllerFromSource(
+        this,
+        ObjectCreate(null, {
+          start: { value: start },
+          pull: { value: pull },
+          cancel: { value: cancel }
+        }),
+        1,
+        () => 1);
+      return makeTransferable(this);
+    }, [], ReadableStream,
+  );
+}
+
+const isReadableStream =
+  isBrandCheck('ReadableStream');
+const isReadableByteStreamController =
+  isBrandCheck('ReadableByteStreamController');
+const isReadableStreamBYOBRequest =
+  isBrandCheck('ReadableStreamBYOBRequest');
+const isReadableStreamDefaultReader =
+  isBrandCheck('ReadableStreamDefaultReader');
+const isReadableStreamBYOBReader =
+  isBrandCheck('ReadableStreamBYOBReader');
+
+// ---- ReadableStream Implementation
+
+function readableStreamPipeTo(
+  source,
+  dest,
+  preventClose,
+  preventAbort,
+  preventCancel,
+  signal) {
+
+  let reader;
+  let writer;
+  // Both of these can throw synchronously. We want to capture
+  // the error and return a rejected promise instead.
+  try {
+    reader = new ReadableStreamDefaultReader(source);
+    writer = new WritableStreamDefaultWriter(dest);
+  } catch (error) {
+    return PromiseReject(error);
+  }
+
+  source[kState].disturbed = true;
+
+  let shuttingDown = false;
+
+  if (signal !== undefined && signal?.[kAborted] === undefined) {
+    return PromiseReject(
+      new ERR_INVALID_ARG_TYPE(
+        'options.signal',
+        'AbortSignal',
+        signal));
+  }
+
+  const promise = createDeferredPromise();
+
+  let currentWrite = PromiseResolve();
+
+  // The error here can be undefined. The rejected arg
+  // tells us that the promise must be rejected even
+  // when error is undefined.
+ function finalize(rejected, error) { + writableStreamDefaultWriterRelease(writer); + readableStreamReaderGenericRelease(reader); + if (signal !== undefined) + signal.removeEventListener('abort', abortAlgorithm); + if (rejected) + promise.reject(error); + else + promise.resolve(); + } + + async function waitForCurrentWrite() { + const write = currentWrite; + await write; + if (write !== currentWrite) + await waitForCurrentWrite(); + } + + function shutdownWithAnAction(action, rejected, originalError) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + complete, + (error) => finalize(true, error)); + return; + } + complete(); + + function complete() { + PromisePrototypeThen( + action(), + () => finalize(rejected, originalError), + (error) => finalize(true, error)); + } + } + + function shutdown(rejected, error) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + () => finalize(rejected, error), + (error) => finalize(true, error)); + return; + } + finalize(rejected, error); + } + + function abortAlgorithm() { + // Cannot use the AbortError class here. 
It must be a DOMException + const error = new DOMException('The operation was aborted', 'AbortError'); + const actions = []; + if (!preventAbort) { + ArrayPrototypePush( + actions, + () => { + if (dest[kState].state === 'writable') + return writableStreamAbort(dest, error); + return PromiseResolve(); + }); + } + if (!preventCancel) { + ArrayPrototypePush( + actions, + () => { + if (source[kState].state === 'readable') + return readableStreamCancel(source, error); + return PromiseResolve(); + }); + } + + shutdownWithAnAction( + async () => PromiseAll(actions.map((action) => action())), + true, + error); + } + + function watchErrored(stream, promise, action) { + if (stream[kState].state === 'errored') + action(stream[kState].storedError); + else + PromisePrototypeCatch(promise, action); + } + + function watchClosed(stream, promise, action) { + if (stream[kState].state === 'closed') + action(stream[kState].storedError); + else + PromisePrototypeThen(promise, action, () => {}); + } + + async function step() { + if (shuttingDown) + return true; + await writer[kState].ready.promise; + return new Promise((resolve, reject) => { + readableStreamDefaultReaderRead( + reader, + { + [kChunk](chunk) { + currentWrite = writableStreamDefaultWriterWrite(writer, chunk); + setPromiseHandled(currentWrite); + resolve(false); + }, + [kClose]: () => resolve(true), + [kError]: reject, + }); + }); + } + + async function run() { + // Run until step resolves as true + while (!await step()) {} + } + + if (signal !== undefined) { + if (signal.aborted) { + abortAlgorithm(); + return promise.promise; + } + signal.addEventListener('abort', abortAlgorithm, { once: true }); + } + + setPromiseHandled(run()); + + watchErrored(source, reader[kState].close.promise, (error) => { + if (!preventAbort) { + return shutdownWithAnAction( + () => writableStreamAbort(dest, error), + true, + error); + } + shutdown(true, error); + }); + + watchErrored(dest, writer[kState].close.promise, (error) => { + if 
(!preventCancel) { + return shutdownWithAnAction( + () => readableStreamCancel(source, error), + true, + error); + } + shutdown(true, error); + }); + + watchClosed(source, reader[kState].close.promise, () => { + if (!preventClose) { + return shutdownWithAnAction( + () => writableStreamDefaultWriterCloseWithErrorPropagation(writer)); + } + shutdown(); + }); + + if (writableStreamCloseQueuedOrInFlight(dest) || + dest[kState].state === 'closed') { + const error = new ERR_INVALID_STATE.TypeError( + 'Destination WritableStream is closed'); + if (!preventCancel) { + shutdownWithAnAction( + () => readableStreamCancel(source, error), true, error); + } else { + shutdown(true, error); + } + } + + return promise.promise; +} + +function readableStreamTee(stream, cloneForBranch2) { + const reader = new ReadableStreamDefaultReader(stream); + let reading = false; + let canceled1 = false; + let canceled2 = false; + let reason1; + let reason2; + let branch1; + let branch2; + const cancelPromise = createDeferredPromise(); + + async function pullAlgorithm() { + if (reading) return; + reading = true; + const readRequest = { + [kChunk](value) { + queueMicrotask(() => { + reading = false; + const value1 = value; + let value2 = value; + if (!canceled2 && cloneForBranch2) { + // Structured Clone + value2 = deserialize(serialize(value2)); + } + if (!canceled1) { + readableStreamDefaultControllerEnqueue( + branch1[kState].controller, + value1); + } + if (!canceled2) { + readableStreamDefaultControllerEnqueue( + branch2[kState].controller, + value2); + } + }); + }, + [kClose]() { + reading = false; + if (!canceled1) + readableStreamDefaultControllerClose(branch1[kState].controller); + if (!canceled2) + readableStreamDefaultControllerClose(branch2[kState].controller); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }, + [kError]() { + reading = false; + }, + }; + readableStreamDefaultReaderRead(reader, readRequest); + } + + function cancel1Algorithm(reason) { + canceled1 = true; 
+ reason1 = reason; + if (canceled2) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + function cancel2Algorithm(reason) { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + branch1 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel1Algorithm); + branch2 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel2Algorithm); + + PromisePrototypeCatch( + reader[kState].close.promise, + (error) => { + readableStreamDefaultControllerError(branch1[kState].controller, error); + readableStreamDefaultControllerError(branch2[kState].controller, error); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }); + + return [branch1, branch2]; +} + +function readableByteStreamControllerConvertPullIntoDescriptor(desc) { + const { + buffer, + bytesFilled, + byteLength, + byteOffset, + ctor, + elementSize, + } = desc; + if (bytesFilled > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + assert(!(bytesFilled % elementSize)); + const transferedBuffer = transferArrayBuffer(buffer); + return new ctor(transferedBuffer, byteOffset, bytesFilled / elementSize); +} + +function isReadableStreamLocked(stream) { + return stream[kState].reader !== undefined; +} + +function readableStreamCancel(stream, reason) { + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + return PromiseResolve(); + case 'errored': + return PromiseReject(stream[kState].storedError); + } + readableStreamClose(stream); + const { + reader, + } = stream[kState]; + if (reader !== undefined && readableStreamHasBYOBReader(stream)) { + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kClose](); + 
reader[kState].readIntoRequests = []; + } + + return PromisePrototypeThen( + ensureIsPromise( + stream[kState].controller[kCancel], + stream[kState].controller, + reason), + () => {}); +} + +function readableStreamClose(stream) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'closed'; + + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.resolve(); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kClose](); + reader[kState].readRequests = []; + } +} + +function readableStreamError(stream, error) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'errored'; + stream[kState].storedError = error; + + const { + reader + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.reject(error); + setPromiseHandled(reader[kState].close.promise); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kError](error); + reader[kState].readRequests = []; + } else { + assert(readableStreamHasBYOBReader(stream)); + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kError](error); + reader[kState].readIntoRequests = []; + } +} + +function readableStreamHasDefaultReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + reader[kType] === 'ReadableStreamDefaultReader'; +} + +function readableStreamGetNumReadRequests(stream) { + assert(readableStreamHasDefaultReader(stream)); + return stream[kState].reader[kState].readRequests.length; +} + +function readableStreamHasBYOBReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + 
reader[kType] === 'ReadableStreamBYOBReader'; +} + +function readableStreamGetNumReadIntoRequests(stream) { + assert(readableStreamHasBYOBReader(stream)); + return stream[kState].reader[kState].readIntoRequests.length; +} + +function readableStreamFulfillReadRequest(stream, chunk, done) { + assert(readableStreamHasDefaultReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readRequests.length); + const readRequest = ArrayPrototypeShift(reader[kState].readRequests); + + // TODO(@jasnell): It's not clear under what exact conditions done + // will be true here. The spec requires this check but none of the + // WPT's or other tests trigger it. Will need to investigate how to + // get coverage for this. + if (done) + readRequest[kClose](); + else + readRequest[kChunk](chunk); +} + +function readableStreamFulfillReadIntoRequest(stream, chunk, done) { + assert(readableStreamHasBYOBReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readIntoRequests.length); + const readIntoRequest = ArrayPrototypeShift(reader[kState].readIntoRequests); + if (done) + readIntoRequest[kClose](chunk); + else + readIntoRequest[kChunk](chunk); +} + +function readableStreamAddReadRequest(stream, readRequest) { + assert(readableStreamHasDefaultReader(stream)); + assert(stream[kState].state === 'readable'); + ArrayPrototypePush(stream[kState].reader[kState].readRequests, readRequest); +} + +function readableStreamAddReadIntoRequest(stream, readIntoRequest) { + assert(readableStreamHasBYOBReader(stream)); + assert(stream[kState].state !== 'errored'); + ArrayPrototypePush( + stream[kState].reader[kState].readIntoRequests, + readIntoRequest); +} + +function readableStreamReaderGenericCancel(reader, reason) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + return readableStreamCancel(stream, reason); +} + +function readableStreamReaderGenericInitialize(reader, stream) { + reader[kState].stream = stream; + 
stream[kState].reader = reader; + switch (stream[kState].state) { + case 'readable': + reader[kState].close = createDeferredPromise(); + break; + case 'closed': + reader[kState].close = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + break; + case 'errored': + reader[kState].close = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(reader[kState].close.promise); + break; + } +} + +function readableStreamReaderGenericRelease(reader) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + assert(stream[kState].reader === reader); + + if (stream[kState].state === 'readable') { + reader[kState].close.reject?.( + new ERR_INVALID_STATE.TypeError('Reader released')); + } else { + reader[kState].close = { + promise: PromiseReject( + new ERR_INVALID_STATE.TypeError('Reader released')), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(reader[kState].close.promise); + stream[kState].reader = undefined; + reader[kState].stream = undefined; +} + +function readableStreamBYOBReaderRead(reader, view, readIntoRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + if (stream[kState].state === 'errored') { + readIntoRequest[kError](stream[kState].storedError); + return; + } + readableByteStreamControllerPullInto( + stream[kState].controller, + view, + readIntoRequest); +} + +function readableStreamDefaultReaderRead(reader, readRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + readRequest[kClose](); + break; + case 'errored': + readRequest[kError](stream[kState].storedError); + break; + case 'readable': + stream[kState].controller[kPull](readRequest); + } +} + +function setupReadableStreamBYOBReader(reader, stream) { + if 
(isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + const { + controller, + } = stream[kState]; + if (!isReadableByteStreamController(controller)) + throw new ERR_INVALID_ARG_VALUE('reader', reader, 'must be a byte stream'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readIntoRequests = []; +} + +function setupReadableStreamDefaultReader(reader, stream) { + if (isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readRequests = []; +} + +function readableStreamDefaultControllerClose(controller) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + controller[kState].closeRequested = true; + if (!controller[kState].queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(controller[kState].stream); + } +} + +function readableStreamDefaultControllerEnqueue(controller, chunk) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + + const { + stream, + } = controller[kState]; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + readableStreamFulfillReadRequest(stream, chunk, false); + } else { + try { + const chunkSize = + FunctionPrototypeCall( + controller[kState].sizeAlgorithm, + undefined, + chunk); + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + readableStreamDefaultControllerError(controller, error); + throw error; + } + } + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function readableStreamDefaultControllerHasBackpressure(controller) { + return !readableStreamDefaultControllerShouldCallPull(controller); +} + +function readableStreamDefaultControllerCanCloseOrEnqueue(controller) { + const { + stream, + } = controller[kState]; + return !controller[kState].closeRequested && + 
stream[kState].state === 'readable'; +} + +function readableStreamDefaultControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: + return highWaterMark - queueTotalSize; + } +} + +function readableStreamDefaultControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller) || + !controller[kState].started) + return false; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + return true; + } + + const desiredSize = readableStreamDefaultControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableStreamDefaultControllerCallPullIfNeeded(controller) { + if (!readableStreamDefaultControllerShouldCallPull(controller)) + return; + if (controller[kState].pulling) { + controller[kState].pullAgain = true; + return; + } + assert(!controller[kState].pullAgain); + controller[kState].pulling = true; + PromisePrototypeThen( + ensureIsPromise(controller[kState].pullAlgorithm, controller), + () => { + controller[kState].pulling = false; + if (controller[kState].pullAgain) { + controller[kState].pullAgain = false; + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function readableStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; + controller[kState].sizeAlgorithm = undefined; +} + +function readableStreamDefaultControllerError(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state === 'readable') { + resetQueue(controller); + readableStreamDefaultControllerClearAlgorithms(controller); + 
readableStreamError(stream, error); + } +} + +function readableStreamDefaultControllerCancelSteps(controller, reason) { + resetQueue(controller); + const result = controller[kState].cancelAlgorithm(reason); + readableStreamDefaultControllerClearAlgorithms(controller); + return result; +} + +function readableStreamDefaultControllerPullSteps(controller, readRequest) { + const { + stream, + queue, + } = controller[kState]; + if (queue.length) { + const chunk = dequeueValue(controller); + if (controller[kState].closeRequested && !queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(stream); + } else { + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + readRequest[kChunk](chunk); + return; + } + readableStreamAddReadRequest(stream, readRequest); + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(stream[kState].controller === undefined); + controller[kState] = { + cancelAlgorithm, + closeRequested: false, + highWaterMark, + pullAgain: false, + pullAlgorithm, + pulling: false, + queue: [], + queueTotalSize: 0, + started: false, + sizeAlgorithm, + stream, + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableStreamDefaultControllerCallPullIfNeeded(controller); + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function setupReadableStreamDefaultControllerFromSource( + stream, + source, + highWaterMark, + sizeAlgorithm) { + const controller = createReadableStreamDefaultController(); + const start = source?.start; + const pull = source?.pull; + const cancel = 
source?.cancel; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function readableByteStreamControllerClose(controller) { + const { + closeRequested, + pendingPullIntos, + queueTotalSize, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + if (queueTotalSize) { + controller[kState].closeRequested = true; + return; + } + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + if (firstPendingPullInto.bytesFilled > 0) { + const error = new ERR_INVALID_STATE.TypeError('Partial read'); + readableByteStreamControllerError(controller, error); + throw error; + } + } + + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); +} + +function readableByteStreamControllerCommitPullIntoDescriptor(stream, desc) { + assert(stream[kState].state !== 'errored'); + let done = false; + if (stream[kState].state === 'closed') { + desc.bytesFilled = 0; + done = true; + } + + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + + if (desc.type === 'default') { + readableStreamFulfillReadRequest(stream, filledView, done); + } else { + assert(desc.type === 'byob'); + readableStreamFulfillReadIntoRequest(stream, filledView, done); + } +} + +function readableByteStreamControllerInvalidateBYOBRequest(controller) { + if (controller[kState].byobRequest === null) + return; + controller[kState].byobRequest[kState].controller = undefined; + controller[kState].byobRequest[kState].view = null; + controller[kState].byobRequest = null; +} + +function 
readableByteStreamControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; +} + +function readableByteStreamControllerClearPendingPullIntos(controller) { + readableByteStreamControllerInvalidateBYOBRequest(controller); + controller[kState].pendingPullIntos = []; +} + +function readableByteStreamControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: return highWaterMark - queueTotalSize; + } +} + +function readableByteStreamControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (stream[kState].state !== 'readable' || + controller[kState].closeRequested || + !controller[kState].started) { + return false; + } + if (readableStreamHasDefaultReader(stream) && + readableStreamGetNumReadRequests(stream) > 0) { + return true; + } + + if (readableStreamHasBYOBReader(stream) && + readableStreamGetNumReadIntoRequests(stream) > 0) { + return true; + } + + const desiredSize = readableByteStreamControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableByteStreamControllerHandleQueueDrain(controller) { + const { + closeRequested, + queueTotalSize, + stream, + } = controller[kState]; + assert(stream[kState].state === 'readable'); + if (!queueTotalSize && closeRequested) { + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); + return; + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerPullInto( + controller, + view, + readIntoRequest) { + const { + closeRequested, + stream, + pendingPullIntos, + } = controller[kState]; + let elementSize = 1; + let ctor = DataViewCtor; + if (isArrayBufferView(view) && !isDataView(view)) { + elementSize = 
view.constructor.BYTES_PER_ELEMENT; + ctor = view.constructor; + } + const buffer = ArrayBufferViewGetBuffer(view); + const byteOffset = ArrayBufferViewGetByteOffset(view); + const byteLength = ArrayBufferViewGetByteLength(view); + const bufferByteLength = ArrayBufferGetByteLength(buffer); + + let transferedBuffer; + try { + transferedBuffer = transferArrayBuffer(buffer); + } catch (error) { + readIntoRequest[kError](error); + return; + } + const desc = { + buffer: transferedBuffer, + bufferByteLength, + byteOffset, + byteLength, + bytesFilled: 0, + elementSize, + ctor, + type: 'byob', + }; + if (pendingPullIntos.length) { + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + return; + } + if (stream[kState].state === 'closed') { + const emptyView = new ctor(desc.buffer, byteOffset, 0); + readIntoRequest[kClose](emptyView); + return; + } + if (controller[kState].queueTotalSize) { + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + readableByteStreamControllerHandleQueueDrain(controller); + readIntoRequest[kChunk](filledView); + return; + } + if (closeRequested) { + const error = new ERR_INVALID_STATE.TypeError('ReadableStream closed'); + readableByteStreamControllerError(controller, error); + readIntoRequest[kError](error); + return; + } + } + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespondInternal(controller, bytesWritten) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + const desc = pendingPullIntos[0]; + readableByteStreamControllerInvalidateBYOBRequest(controller); + if (stream[kState].state === 'closed') { + if (bytesWritten) + throw new ERR_INVALID_STATE.TypeError( + 'Controller is closed but 
view is not zero-length'); + readableByteStreamControllerRespondInClosedState(controller, desc); + } else { + assert(stream[kState].state === 'readable'); + if (!bytesWritten) + throw new ERR_INVALID_STATE.TypeError('View cannot be zero-length'); + readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespond(controller, bytesWritten) { + const { + pendingPullIntos, + stream, + } = controller[kState]; + assert(pendingPullIntos.length); + const desc = pendingPullIntos[0]; + + if (stream[kState].state === 'closed') { + if (bytesWritten !== 0) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + } else { + assert(stream[kState].state === 'readable'); + + if (!bytesWritten) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + + if ((desc.bytesFilled + bytesWritten) > desc.byteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('bytesWritten', bytesWritten); + } + + desc.buffer = transferArrayBuffer(desc.buffer); + + readableByteStreamControllerRespondInternal(controller, bytesWritten); +} + +function readableByteStreamControllerRespondInClosedState(controller, desc) { + assert(!desc.bytesFilled); + const { + stream, + } = controller[kState]; + if (readableStreamHasBYOBReader(stream)) { + while (readableStreamGetNumReadIntoRequests(stream) > 0) { + readableByteStreamControllerCommitPullIntoDescriptor( + stream, + readableByteStreamControllerShiftPendingPullInto(controller)); + } + } +} + +function readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + size, + desc) { + const { + pendingPullIntos, + byobRequest, + } = controller[kState]; + assert(!pendingPullIntos.length || pendingPullIntos[0] === desc); + assert(byobRequest === null); + desc.bytesFilled += size; +} + +function readableByteStreamControllerEnqueue( + controller, + buffer, + byteLength, + byteOffset) { + const { + 
closeRequested, + pendingPullIntos, + queue, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + const transferedBuffer = transferArrayBuffer(buffer); + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + + const pendingBufferByteLength = + ArrayBufferGetByteLength(firstPendingPullInto.buffer); + if (pendingBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'Destination ArrayBuffer is zero-length or detached'); + } + + firstPendingPullInto.buffer = + transferArrayBuffer(firstPendingPullInto.buffer); + } + + readableByteStreamControllerInvalidateBYOBRequest(controller); + + if (readableStreamHasDefaultReader(stream)) { + if (!readableStreamGetNumReadRequests(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + } else { + assert(!queue.length); + const transferedView = + new Uint8Array(transferedBuffer, byteOffset, byteLength); + readableStreamFulfillReadRequest(stream, transferedView, false); + } + } else if (readableStreamHasBYOBReader(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller); + } else { + assert(!isReadableStreamLocked(stream)); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerEnqueueChunkToQueue( + controller, + buffer, + byteOffset, + byteLength) { + ArrayPrototypePush( + controller[kState].queue, + { + buffer, + byteOffset, + byteLength, + }); + controller[kState].queueTotalSize += byteLength; +} + +function readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc) { + const { + buffer, + byteLength, + byteOffset, + 
bytesFilled, + elementSize, + } = desc; + const currentAlignedBytes = bytesFilled - (bytesFilled % elementSize); + const maxBytesToCopy = MathMin( + controller[kState].queueTotalSize, + byteLength - bytesFilled); + const maxBytesFilled = bytesFilled + maxBytesToCopy; + const maxAlignedBytes = maxBytesFilled - (maxBytesFilled % elementSize); + let totalBytesToCopyRemaining = maxBytesToCopy; + let ready = false; + if (maxAlignedBytes > currentAlignedBytes) { + totalBytesToCopyRemaining = maxAlignedBytes - bytesFilled; + ready = true; + } + const { + queue, + } = controller[kState]; + + while (totalBytesToCopyRemaining) { + const headOfQueue = queue[0]; + const bytesToCopy = MathMin( + totalBytesToCopyRemaining, + headOfQueue.byteLength); + const destStart = byteOffset + desc.bytesFilled; + const arrayBufferByteLength = ArrayBufferGetByteLength(buffer); + if (arrayBufferByteLength - destStart < bytesToCopy) { + throw new ERR_INVALID_STATE.RangeError( + 'view ArrayBuffer size is invalid'); + } + assert(arrayBufferByteLength - destStart >= bytesToCopy); + copyArrayBuffer( + buffer, + destStart, + headOfQueue.buffer, + headOfQueue.byteOffset, + bytesToCopy); + if (headOfQueue.byteLength === bytesToCopy) { + ArrayPrototypeShift(queue); + } else { + headOfQueue.byteOffset += bytesToCopy; + headOfQueue.byteLength -= bytesToCopy; + } + controller[kState].queueTotalSize -= bytesToCopy; + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesToCopy, + desc); + totalBytesToCopyRemaining -= bytesToCopy; + } + + if (!ready) { + assert(!controller[kState].queueTotalSize); + assert(desc.bytesFilled > 0); + assert(desc.bytesFilled < elementSize); + } + return ready; +} + +function readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller) { + const { + closeRequested, + pendingPullIntos, + stream, + } = controller[kState]; + assert(!closeRequested); + while (pendingPullIntos.length) { + if (!controller[kState].queueTotalSize) + return; + 
const desc = pendingPullIntos[0]; + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + readableByteStreamControllerShiftPendingPullInto(controller); + readableByteStreamControllerCommitPullIntoDescriptor(stream, desc); + } + } +} + +function readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc) { + const { + buffer, + bytesFilled, + byteLength, + } = desc; + + if (bytesFilled + bytesWritten > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesWritten, + desc); + + if (desc.bytesFilled < desc.elementSize) + return; + + readableByteStreamControllerShiftPendingPullInto(controller); + + const remainderSize = desc.bytesFilled % desc.elementSize; + + if (remainderSize) { + const end = desc.byteOffset + desc.bytesFilled; + const start = end - remainderSize; + const remainder = + ArrayBufferPrototypeSlice( + buffer, + start, + end); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + remainder, + 0, + ArrayBufferGetByteLength(remainder)); + } + desc.bytesFilled -= remainderSize; + readableByteStreamControllerCommitPullIntoDescriptor( + controller[kState].stream, + desc); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); +} + +function readableByteStreamControllerRespondWithNewView(controller, view) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + assert(pendingPullIntos.length); + + const desc = pendingPullIntos[0]; + assert(stream[kState].state !== 'errored'); + + if (!isArrayBufferView(view)) { + throw new ERR_INVALID_ARG_TYPE( + 'view', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + view); + } + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewByteOffset = ArrayBufferViewGetByteOffset(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = 
ArrayBufferGetByteLength(viewBuffer);
+
+  const {
+    byteOffset,
+    byteLength,
+    bytesFilled,
+    bufferByteLength,
+  } = desc;
+
+  if (byteOffset + bytesFilled !== viewByteOffset)
+    throw new ERR_INVALID_ARG_VALUE.RangeError('view', view);
+
+  if (bytesFilled + viewByteLength > byteLength)
+    throw new ERR_INVALID_ARG_VALUE.RangeError('view', view);
+
+  if (bufferByteLength !== viewBufferByteLength)
+    throw new ERR_INVALID_ARG_VALUE.RangeError('view', view);
+
+  desc.buffer = transferArrayBuffer(viewBuffer);
+
+  readableByteStreamControllerRespondInternal(controller, viewByteLength);
+}
+
+function readableByteStreamControllerShiftPendingPullInto(controller) {
+  assert(controller[kState].byobRequest === null);
+  return ArrayPrototypeShift(controller[kState].pendingPullIntos);
+}
+
+function readableByteStreamControllerCallPullIfNeeded(controller) {
+  if (!readableByteStreamControllerShouldCallPull(controller))
+    return;
+  if (controller[kState].pulling) {
+    controller[kState].pullAgain = true;
+    return;
+  }
+  assert(!controller[kState].pullAgain);
+  controller[kState].pulling = true;
+  PromisePrototypeThen(
+    ensureIsPromise(controller[kState].pullAlgorithm, controller),
+    () => {
+      controller[kState].pulling = false;
+      if (controller[kState].pullAgain) {
+        controller[kState].pullAgain = false;
+        readableByteStreamControllerCallPullIfNeeded(controller);
+      }
+    },
+    (error) => readableByteStreamControllerError(controller, error));
+}
+
+function readableByteStreamControllerError(controller, error) {
+  const {
+    stream,
+  } = controller[kState];
+  if (stream[kState].state !== 'readable')
+    return;
+  readableByteStreamControllerClearPendingPullIntos(controller);
+  resetQueue(controller);
+  readableByteStreamControllerClearAlgorithms(controller);
+  readableStreamError(stream, error);
+}
+
+function readableByteStreamControllerCancelSteps(controller, reason) {
+  readableByteStreamControllerClearPendingPullIntos(controller);
+  resetQueue(controller);
+  const result = 
controller[kState].cancelAlgorithm(reason); + readableByteStreamControllerClearAlgorithms(controller); + return result; +} + +function readableByteStreamControllerPullSteps(controller, readRequest) { + const { + pendingPullIntos, + queue, + queueTotalSize, + stream, + } = controller[kState]; + assert(readableStreamHasDefaultReader(stream)); + if (queueTotalSize) { + assert(!readableStreamGetNumReadRequests(stream)); + const { + buffer, + byteOffset, + byteLength, + } = ArrayPrototypeShift(queue); + controller[kState].queueTotalSize -= byteLength; + readableByteStreamControllerHandleQueueDrain(controller); + const view = new Uint8Array(buffer, byteOffset, byteLength); + readRequest[kChunk](view); + return; + } + const { + autoAllocateChunkSize, + } = controller[kState]; + if (autoAllocateChunkSize !== undefined) { + try { + const buffer = new ArrayBuffer(autoAllocateChunkSize); + ArrayPrototypePush( + pendingPullIntos, + { + buffer, + byteOffset: 0, + byteLength: autoAllocateChunkSize, + bytesFilled: 0, + elementSize: 1, + ctor: Uint8Array, + type: 'default', + }); + } catch (error) { + readRequest[kError](error); + return; + } + } + + readableStreamAddReadRequest(stream, readRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize) { + assert(stream[kState].controller === undefined); + if (autoAllocateChunkSize !== undefined) { + assert(NumberIsInteger(autoAllocateChunkSize)); + assert(autoAllocateChunkSize > 0); + } + controller[kState] = { + byobRequest: null, + closeRequested: false, + pullAgain: false, + pulling: false, + started: false, + stream, + queue: [], + queueTotalSize: 0, + highWaterMark, + pullAlgorithm, + cancelAlgorithm, + autoAllocateChunkSize, + pendingPullIntos: [], + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + 
PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableByteStreamControllerCallPullIfNeeded(controller); + }, + (error) => readableByteStreamControllerError(controller, error)); +} + +function setupReadableByteStreamControllerFromSource( + stream, + source, + highWaterMark) { + const controller = createReadableByteStreamController(); + const start = source?.start; + const pull = source?.pull; + const cancel = source?.cancel; + const autoAllocateChunkSize = source?.autoAllocateChunkSize; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + if (autoAllocateChunkSize === 0) { + throw new ERR_INVALID_ARG_VALUE( + 'source.autoAllocateChunkSize', + autoAllocateChunkSize); + } + setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize); +} + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransferedReadableStream, + + // Exported Brand Checks + isReadableStream, + isReadableByteStreamController, + isReadableStreamBYOBRequest, + isReadableStreamDefaultReader, + isReadableStreamBYOBReader, + isWritableStreamDefaultWriter, + isWritableStreamDefaultController, + + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + isReadableStreamLocked, + readableStreamCancel, + readableStreamClose, + readableStreamError, + readableStreamHasDefaultReader, + readableStreamGetNumReadRequests, + readableStreamHasBYOBReader, + 
readableStreamGetNumReadIntoRequests, + readableStreamFulfillReadRequest, + readableStreamFulfillReadIntoRequest, + readableStreamAddReadRequest, + readableStreamAddReadIntoRequest, + readableStreamReaderGenericCancel, + readableStreamReaderGenericInitialize, + readableStreamReaderGenericRelease, + readableStreamBYOBReaderRead, + readableStreamDefaultReaderRead, + setupReadableStreamBYOBReader, + setupReadableStreamDefaultReader, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerHasBackpressure, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerShouldCallPull, + readableStreamDefaultControllerCallPullIfNeeded, + readableStreamDefaultControllerClearAlgorithms, + readableStreamDefaultControllerError, + readableStreamDefaultControllerCancelSteps, + readableStreamDefaultControllerPullSteps, + setupReadableStreamDefaultController, + setupReadableStreamDefaultControllerFromSource, + readableByteStreamControllerClose, + readableByteStreamControllerCommitPullIntoDescriptor, + readableByteStreamControllerInvalidateBYOBRequest, + readableByteStreamControllerClearAlgorithms, + readableByteStreamControllerClearPendingPullIntos, + readableByteStreamControllerGetDesiredSize, + readableByteStreamControllerShouldCallPull, + readableByteStreamControllerHandleQueueDrain, + readableByteStreamControllerPullInto, + readableByteStreamControllerRespondInternal, + readableByteStreamControllerRespond, + readableByteStreamControllerRespondInClosedState, + readableByteStreamControllerFillHeadPullIntoDescriptor, + readableByteStreamControllerEnqueue, + readableByteStreamControllerEnqueueChunkToQueue, + readableByteStreamControllerFillPullIntoDescriptorFromQueue, + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue, + readableByteStreamControllerRespondInReadableState, + readableByteStreamControllerRespondWithNewView, + 
readableByteStreamControllerShiftPendingPullInto, + readableByteStreamControllerCallPullIfNeeded, + readableByteStreamControllerError, + readableByteStreamControllerCancelSteps, + readableByteStreamControllerPullSteps, + setupReadableByteStreamController, + setupReadableByteStreamControllerFromSource, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/internal/webstreams/transfer.js b/lib/internal/webstreams/transfer.js new file mode 100644 index 00000000000000..72cdc36a153564 --- /dev/null +++ b/lib/internal/webstreams/transfer.js @@ -0,0 +1,299 @@ +'use strict'; + +const { + ObjectDefineProperties, + PromiseResolve, + ReflectConstruct, +} = primordials; + +const { + kState, + setPromiseHandled, +} = require('internal/webstreams/util'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + ReadableStream, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerError, + readableStreamPipeTo, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const { + createDeferredPromise, +} = require('internal/util'); + +const assert = require('internal/assert'); + +const { + makeTransferable, + kClone, + kDeserialize, +} = require('internal/worker/js_transferable'); + +// This class is a bit of a hack. The Node.js implementation of +// DOMException is not transferable/cloneable. This provides us +// with a variant that is. Unfortunately, it means playing around +// a bit with the message, name, and code properties and the +// prototype. We can revisit this if DOMException is ever made +// properly cloneable. 
+class CloneableDOMException extends DOMException { + constructor(message, name) { + super(message, name); + this[kDeserialize]({ + message: this.message, + name: this.name, + code: this.code, + }); + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + [kClone]() { + return { + data: { + message: this.message, + name: this.name, + code: this.code, + }, + deserializeInfo: + 'internal/webstreams/transfer:InternalCloneableDOMException' + }; + } + + [kDeserialize]({ message, name, code }) { + ObjectDefineProperties(this, { + message: { + configurable: true, + enumerable: true, + get() { return message; }, + }, + name: { + configurable: true, + enumerable: true, + get() { return name; }, + }, + code: { + configurable: true, + enumerable: true, + get() { return code; }, + }, + }); + } +} + +function InternalCloneableDOMException() { + return makeTransferable( + ReflectConstruct( + CloneableDOMException, + [], + DOMException)); +} +InternalCloneableDOMException[kDeserialize] = () => {}; + +class CrossRealmTransformReadableSource { + constructor(port) { + this[kState] = { + port, + controller: undefined, + }; + + port.onmessage = ({ data }) => { + const { + controller, + } = this[kState]; + const { + type, + value, + } = data; + switch (type) { + case 'chunk': + readableStreamDefaultControllerEnqueue( + controller, + value); + break; + case 'close': + readableStreamDefaultControllerClose(controller); + port.close(); + break; + case 'error': + readableStreamDefaultControllerError(controller, value); + port.close(); + break; + } + }; + + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transfered ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + readableStreamDefaultControllerError( + this[kState].controller, + error); + port.close(); + }; + } + + start(controller) { + this[kState].controller = controller; + } + + async pull() { + 
this[kState].port.postMessage({ type: 'pull' }); + } + + async cancel(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +class CrossRealmTransformWritableSink { + constructor(port) { + this[kState] = { + port, + controller: undefined, + backpressurePromise: createDeferredPromise(), + }; + + port.onmessage = ({ data }) => { + assert(typeof data === 'object'); + const { + type, + value + } = { ...data }; + assert(typeof type === 'string'); + switch (type) { + case 'pull': + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + case 'error': + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + value); + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + } + }; + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transfered ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + error); + port.close(); + }; + + } + + start(controller) { + this[kState].controller = controller; + } + + async write(chunk) { + if (this[kState].backpressurePromise === undefined) { + this[kState].backpressurePromise = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + } + await this[kState].backpressurePromise.promise; + this[kState].backpressurePromise = createDeferredPromise(); + try { + this[kState].port.postMessage({ type: 'chunk', value: 
chunk }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + this[kState].port.close(); + throw error; + } + } + + close() { + this[kState].port.postMessage({ type: 'close' }); + this[kState].port.close(); + } + + abort(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +function newCrossRealmReadableStream(writable, port) { + const readable = + new ReadableStream( + new CrossRealmTransformReadableSource(port)); + + const promise = + readableStreamPipeTo(readable, writable, false, false, false); + + setPromiseHandled(promise); + + return { + readable, + promise, + }; +} + +function newCrossRealmWritableSink(readable, port) { + const writable = + new WritableStream( + new CrossRealmTransformWritableSink(port)); + + const promise = readableStreamPipeTo(readable, writable, false, false, false); + setPromiseHandled(promise); + return { + writable, + promise, + }; +} + +module.exports = { + newCrossRealmReadableStream, + newCrossRealmWritableSink, + CrossRealmTransformWritableSink, + CrossRealmTransformReadableSource, + CloneableDOMException, + InternalCloneableDOMException, +}; diff --git a/lib/internal/webstreams/transformstream.js b/lib/internal/webstreams/transformstream.js new file mode 100644 index 00000000000000..745675266f7f1b --- /dev/null +++ b/lib/internal/webstreams/transformstream.js @@ -0,0 +1,591 @@ +'use strict'; + +const { + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + PromisePrototypeCatch, + 
PromisePrototypeThen, + PromiseResolve, + ReflectConstruct, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + customInspect, + ensureIsPromise, + extractHighWaterMark, + extractSizeAlgorithm, + isBrandCheck, + nonOpFlush, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + ReadableStream, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerError, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerHasBackpressure, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +/** + * @typedef {import('./queuingstrategies').QueuingStrategy + * } QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * + * @callback TransformerStartCallback + * @param {TransformStreamDefaultController} controller; + * + * @callback TransformerFlushCallback + * @param {TransformStreamDefaultController} controller; + * @returns {Promise} + * + * @callback TransformerTransformCallback + * @param {any} chunk + * @param {TransformStreamDefaultController} controller + * @returns {Promise} + * + * @typedef {{ + * start? : TransformerStartCallback, + * transform? : TransformerTransformCallback, + * flush? : TransformerFlushCallback, + * readableType? : any, + * writableType? 
: any, + * }} Transformer + */ + +class TransformStream { + [kType] = 'TransformStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {Transformer} [transformer] + * @param {QueuingStrategy} [writableStrategy] + * @param {QueuingStrategy} [readableStrategy] + */ + constructor( + transformer = null, + writableStrategy = {}, + readableStrategy = {}) { + const readableType = transformer?.readableType; + const writableType = transformer?.writableType; + const start = transformer?.start; + + if (readableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.readableType', + readableType); + } + if (writableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.writableType', + writableType); + } + + const readableHighWaterMark = readableStrategy?.highWaterMark; + const readableSize = readableStrategy?.size; + + const writableHighWaterMark = writableStrategy?.highWaterMark; + const writableSize = writableStrategy?.size; + + const actualReadableHighWaterMark = + extractHighWaterMark(readableHighWaterMark, 0); + const actualReadableSize = extractSizeAlgorithm(readableSize); + + const actualWritableHighWaterMark = + extractHighWaterMark(writableHighWaterMark, 1); + const actualWritableSize = extractSizeAlgorithm(writableSize); + + const startPromise = createDeferredPromise(); + + initializeTransformStream( + this, + startPromise, + actualWritableHighWaterMark, + actualWritableSize, + actualReadableHighWaterMark, + actualReadableSize); + + setupTransformStreamDefaultControllerFromTransformer(this, transformer); + + if (start !== undefined) { + startPromise.resolve( + FunctionPrototypeCall( + start, + transformer, + this[kState].controller)); + } else { + startPromise.resolve(); + } + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {ReadableStream} + */ + get readable() { + if (!isTransformStream(this)) + throw new 
ERR_INVALID_THIS('TransformStream'); + return this[kState].readable; + } + + /** + * @readonly + * @type {WritableStream} + */ + get writable() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + return this[kState].writable; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + readable: this.readable, + writable: this.writable, + backpressure: this[kState].backpressure, + }); + } + + [kTransfer]() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + const { + readable, + writable, + } = this[kState]; + if (readable.locked) { + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + if (writable.locked) { + throw new DOMException( + 'Cannot transfer a locked WritableStream', + 'DataCloneError'); + } + return { + data: { + readable, + writable, + }, + deserializeInfo: + 'internal/webstreams/transformstream:TransferedTransformStream' + }; + } + + [kTransferList]() { + return [ this[kState].readable, this[kState].writable ]; + } + + [kDeserialize]({ readable, writable }) { + this[kState].readable = readable; + this[kState].writable = writable; + } +} + +ObjectDefineProperties(TransformStream.prototype, { + readable: { enumerable: true }, + writable: { enumerable: true }, +}); + +function TransferedTransformStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'TransformStream'; + this[kState] = { + readable: undefined, + writable: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + controller: undefined, + }; + }, + [], TransformStream)); +} +TransferedTransformStream.prototype[kDeserialize] = () => {}; + +class TransformStreamDefaultController { + [kType] = 'TransformStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + 
/** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + const { + stream, + } = this[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + return readableStreamDefaultControllerGetDesiredSize(readableController); + } + + /** + * @param {any} chunk + */ + enqueue(chunk = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any} reason + */ + error(reason = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerError(this, reason); + } + + terminate() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerTerminate(this); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + }); + } +} + +ObjectDefineProperties(TransformStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, + terminate: { enumerable: true }, +}); + +function createTransformStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'TransformStreamDefaultController'; + }, + [], + TransformStreamDefaultController); +} + +const isTransformStream = + isBrandCheck('TransformStream'); +const isTransformStreamDefaultController = + isBrandCheck('TransformStreamDefaultController'); + +async function defaultTransformAlgorithm(chunk, controller) { + transformStreamDefaultControllerEnqueue(controller, chunk); +} + +function initializeTransformStream( + stream, + startPromise, + 
writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm) { + + const writable = new WritableStream({ + start() { return startPromise.promise; }, + write(chunk) { + return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); + }, + abort(reason) { + return transformStreamDefaultSinkAbortAlgorithm(stream, reason); + }, + close() { + return transformStreamDefaultSinkCloseAlgorithm(stream); + }, + }, { + highWaterMark: writableHighWaterMark, + size: writableSizeAlgorithm, + }); + + const readable = new ReadableStream({ + start() { return startPromise.promise; }, + pull() { + return transformStreamDefaultSourcePullAlgorithm(stream); + }, + cancel(reason) { + transformStreamErrorWritableAndUnblockWrite(stream, reason); + return PromiseResolve(); + }, + }, { + highWaterMark: readableHighWaterMark, + size: readableSizeAlgorithm, + }); + + stream[kState] = { + readable, + writable, + controller: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + } + }; + + transformStreamSetBackpressure(stream, true); +} + +function transformStreamError(stream, error) { + const { + readable, + } = stream[kState]; + const { + controller, + } = readable[kState]; + readableStreamDefaultControllerError(controller, error); + transformStreamErrorWritableAndUnblockWrite(stream, error); +} + +function transformStreamErrorWritableAndUnblockWrite(stream, error) { + const { + controller, + writable, + } = stream[kState]; + transformStreamDefaultControllerClearAlgorithms(controller); + writableStreamDefaultControllerErrorIfNeeded( + writable[kState].controller, + error); + if (stream[kState].backpressure) + transformStreamSetBackpressure(stream, false); +} + +function transformStreamSetBackpressure(stream, backpressure) { + assert(stream[kState].backpressure !== backpressure); + if (stream[kState].backpressureChange.promise !== undefined) + 
stream[kState].backpressureChange.resolve?.(); + stream[kState].backpressureChange = createDeferredPromise(); + stream[kState].backpressure = backpressure; +} + +function setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm) { + assert(isTransformStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + stream, + transformAlgorithm, + flushAlgorithm, + }; + stream[kState].controller = controller; +} + +function setupTransformStreamDefaultControllerFromTransformer( + stream, + transformer) { + const controller = createTransformStreamDefaultController(); + const transform = transformer?.transform || defaultTransformAlgorithm; + const flush = transformer?.flush || nonOpFlush; + const transformAlgorithm = + FunctionPrototypeBind(transform, transformer); + const flushAlgorithm = + FunctionPrototypeBind(flush, transformer); + + setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm); +} + +function transformStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].transformAlgorithm = undefined; + controller[kState].flushAlgorithm = undefined; +} + +function transformStreamDefaultControllerEnqueue(controller, chunk) { + const { + stream, + } = controller[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) + throw new ERR_INVALID_STATE.TypeError('Unable to enqueue'); + try { + readableStreamDefaultControllerEnqueue(readableController, chunk); + } catch (error) { + transformStreamErrorWritableAndUnblockWrite(stream, error); + throw readable[kState].storedError; + } + const backpressure = + readableStreamDefaultControllerHasBackpressure(readableController); + if (backpressure !== stream[kState].backpressure) { + assert(backpressure); + transformStreamSetBackpressure(stream, true); + } 
}

// Errors the entire TransformStream owned by this controller.
function transformStreamDefaultControllerError(controller, error) {
  transformStreamError(controller[kState].stream, error);
}

// Runs the user-supplied transform algorithm for one chunk. The result is
// normalized to a promise; on rejection the whole TransformStream is
// errored and the rejection is re-thrown so callers also observe it.
function transformStreamDefaultControllerPerformTransform(controller, chunk) {
  const transformPromise =
    ensureIsPromise(
      controller[kState].transformAlgorithm,
      controller,
      chunk,
      controller);
  return PromisePrototypeCatch(
    transformPromise,
    (error) => {
      transformStreamError(controller[kState].stream, error);
      throw error;
    });
}

// Terminates the stream: closes the readable side and errors/unblocks the
// writable side with a terminal TypeError (per the WHATWG Streams
// "terminate" algorithm of the same name).
function transformStreamDefaultControllerTerminate(controller) {
  const {
    stream,
  } = controller[kState];
  const {
    readable,
  } = stream[kState];
  assert(readable !== undefined);
  const {
    controller: readableController,
  } = readable[kState];
  readableStreamDefaultControllerClose(readableController);
  transformStreamErrorWritableAndUnblockWrite(
    stream,
    new ERR_INVALID_STATE.TypeError('TransformStream has been terminated'));
}

// Write algorithm for the writable side's sink. If the stream is under
// backpressure, the transform is deferred until the backpressureChange
// promise settles; the writable state is re-checked afterwards because it
// may have started erroring while waiting.
function transformStreamDefaultSinkWriteAlgorithm(stream, chunk) {
  const {
    writable,
    controller,
  } = stream[kState];
  assert(writable[kState].state === 'writable');
  if (stream[kState].backpressure) {
    const backpressureChange = stream[kState].backpressureChange.promise;
    return PromisePrototypeThen(
      backpressureChange,
      () => {
        const {
          writable,
        } = stream[kState];
        if (writable[kState].state === 'erroring')
          throw writable[kState].storedError;
        assert(writable[kState].state === 'writable');
        return transformStreamDefaultControllerPerformTransform(
          controller,
          chunk);
      });
  }
  return transformStreamDefaultControllerPerformTransform(controller, chunk);
}

// Abort algorithm for the writable side's sink: errors both sides.
async function transformStreamDefaultSinkAbortAlgorithm(stream, reason) {
  transformStreamError(stream, reason);
}

// Close algorithm for the writable side's sink: runs the user flush
// algorithm, then closes the readable side on success or errors the
// stream on failure.
function transformStreamDefaultSinkCloseAlgorithm(stream) {
  const {
    readable,
    controller,
  } = stream[kState];

  const flushPromise =
    ensureIsPromise(
      controller[kState].flushAlgorithm,
      controller,
      controller);

  // Algorithms are cleared eagerly so they can be GC'd even while the
  // flush promise is still pending.
  transformStreamDefaultControllerClearAlgorithms(controller);
  return PromisePrototypeThen(
    flushPromise,
    () => {
      if (readable[kState].state === 'errored')
        throw readable[kState].storedError;
      readableStreamDefaultControllerClose(readable[kState].controller);
    },
    (error) => {
      transformStreamError(stream, error);
      // transformStreamError stores the error on the readable side;
      // rethrow that stored error so the close promise rejects with it.
      throw readable[kState].storedError;
    });
}

// Pull algorithm for the readable side: relieves backpressure and resolves
// once the backpressure state has changed.
function transformStreamDefaultSourcePullAlgorithm(stream) {
  assert(stream[kState].backpressure);
  assert(stream[kState].backpressureChange.promise !== undefined);
  transformStreamSetBackpressure(stream, false);
  return stream[kState].backpressureChange.promise;
}

module.exports = {
  TransformStream,
  TransformStreamDefaultController,
  TransferedTransformStream,

  // Exported Brand Checks
  isTransformStream,
  isTransformStreamDefaultController,
};
diff --git a/lib/internal/webstreams/util.js b/lib/internal/webstreams/util.js
new file mode 100644
index 00000000000000..e0876caf81b944
--- /dev/null
+++ b/lib/internal/webstreams/util.js
@@ -0,0 +1,237 @@
'use strict';

// Shared helpers for the Web Streams implementation: queue-with-size
// bookkeeping, strategy extraction, defensive ArrayBuffer accessors, and
// brand checking, as used by readable/writable/transform stream modules.

const {
  ArrayBufferPrototype,
  ArrayPrototypePush,
  ArrayPrototypeShift,
  AsyncIteratorPrototype,
  FunctionPrototypeCall,
  MathMax,
  NumberIsNaN,
  ObjectCreate,
  PromisePrototypeThen,
  PromiseResolve,
  PromiseReject,
  ReflectGet,
  Symbol,
} = primordials;

const {
  codes: {
    ERR_INVALID_ARG_TYPE,
    ERR_INVALID_ARG_VALUE,
    ERR_OPERATION_FAILED,
  },
} = require('internal/errors');

const {
  copyArrayBuffer,
  detachArrayBuffer
} = internalBinding('buffer');

const {
  isPromise,
} = require('util/types');

const {
  inspect,
} = require('util');

const {
  getPromiseDetails,
  kPending,
} = internalBinding('util');

const assert = require('internal/assert');

// Symbols used to brand-check webstreams objects (see isBrandCheck below).
const kState = Symbol('kState');
const kType = Symbol('kType');

// Prototype template for webstreams async iterators. The `next` and
// `return` slots are declared here as writable/configurable placeholders;
// concrete implementations are assigned per-iterator by the consumers of
// this object.
const AsyncIterator = ObjectCreate(AsyncIteratorPrototype, {
  next: {
    configurable: true,
    enumerable: true,
    writable: true,
  },
  return: {
    configurable: true,
    enumerable: true,
    writable: true,
  },
});

// Validates and normalizes strategy.highWaterMark. Note the unary `+`
// coercion happens first, so the typeof check mainly documents intent;
// NaN and negative values are rejected per the spec's
// ExtractHighWaterMark.
function extractHighWaterMark(value, defaultHWM) {
  if (value === undefined) return defaultHWM;
  value = +value;
  if (typeof value !== 'number' ||
      NumberIsNaN(value) ||
      value < 0)
    throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.highWaterMark', value);
  return value;
}

// Validates strategy.size, defaulting to a constant chunk size of 1.
function extractSizeAlgorithm(size) {
  if (size === undefined) return () => 1;
  if (typeof size !== 'function')
    throw new ERR_INVALID_ARG_TYPE('strategy.size', 'Function', size);
  return size;
}

// Common util.inspect helper used by the [kInspect] methods of the
// webstreams classes; decrements depth for the nested state object.
function customInspect(depth, options, name, data) {
  if (depth < 0)
    return this;

  const opts = {
    ...options,
    depth: options.depth == null ? null : options.depth - 1
  };

  return `${name} ${inspect(data, opts)}`;
}

// These are defensive to work around the possibility that
// the buffer, byteLength, and byteOffset properties on
// ArrayBuffer and ArrayBufferView's may have been tampered with.

function ArrayBufferViewGetBuffer(view) {
  return ReflectGet(view.constructor.prototype, 'buffer', view);
}

function ArrayBufferViewGetByteLength(view) {
  return ReflectGet(view.constructor.prototype, 'byteLength', view);
}

function ArrayBufferViewGetByteOffset(view) {
  return ReflectGet(view.constructor.prototype, 'byteOffset', view);
}

function ArrayBufferGetByteLength(view) {
  return ReflectGet(ArrayBufferPrototype, 'byteLength', view);
}

// Returns a predicate that checks whether a value is a genuine webstreams
// object of the given type, by probing the private kState/kType slots
// (resistant to user objects merely shaped like a stream).
function isBrandCheck(brand) {
  return (value) => {
    return value != null &&
           value[kState] !== undefined &&
           value[kType] === brand;
  };
}

// Detaches the given ArrayBuffer, returning the transferred copy; throws
// if the buffer is not detachable (e.g. already detached).
function transferArrayBuffer(buffer) {
  const res = detachArrayBuffer(buffer);
  if (res === undefined) {
    throw new ERR_OPERATION_FAILED.TypeError(
      'The ArrayBuffer could not be transfered');
  }
  return res;
}

// Removes and returns the oldest queued value, keeping queueTotalSize
// non-negative (clamped with MathMax to absorb rounding of sizes).
function dequeueValue(controller) {
  assert(controller[kState].queue !== undefined);
  assert(controller[kState].queueTotalSize !== undefined);
  assert(controller[kState].queue.length);
  const {
    value,
    size,
  } = ArrayPrototypeShift(controller[kState].queue);
  controller[kState].queueTotalSize =
    MathMax(0, controller[kState].queueTotalSize - size);
  return value;
}

// Empties the queue and resets its accumulated size.
function resetQueue(controller) {
  assert(controller[kState].queue !== undefined);
  assert(controller[kState].queueTotalSize !== undefined);
  controller[kState].queue = [];
  controller[kState].queueTotalSize = 0;
}

// Returns (without removing) the oldest queued value.
function peekQueueValue(controller) {
  assert(controller[kState].queue !== undefined);
  assert(controller[kState].queueTotalSize !== undefined);
  assert(controller[kState].queue.length);
  return controller[kState].queue[0].value;
}

// Appends a value with its strategy-computed size; rejects non-finite or
// negative sizes per the spec's EnqueueValueWithSize.
function enqueueValueWithSize(controller, value, size) {
  assert(controller[kState].queue !== undefined);
  assert(controller[kState].queueTotalSize !== undefined);
  size = +size;
  if (typeof size !== 'number' ||
      size < 0 ||
      NumberIsNaN(size) ||
      size === Infinity) {
    throw new ERR_INVALID_ARG_VALUE.RangeError('size', size);
  }
  ArrayPrototypePush(controller[kState].queue, { value, size });
  controller[kState].queueTotalSize += size;
}

// Invokes a user-supplied algorithm and normalizes the outcome to a
// promise: sync returns are wrapped, sync throws become rejections.
function ensureIsPromise(fn, thisArg, ...args) {
  try {
    const value = FunctionPrototypeCall(fn, thisArg, ...args);
    return isPromise(value) ? value : PromiseResolve(value);
  } catch (error) {
    return PromiseReject(error);
  }
}

// True only if the promise exists and V8 reports it as still pending.
function isPromisePending(promise) {
  if (promise === undefined) return false;
  const details = getPromiseDetails(promise);
  return details?.[0] === kPending;
}

function setPromiseHandled(promise) {
  // Alternatively, we could use the native API
  // MarkAsHandled, but this avoids the extra boundary cross
  // and is hopefully faster at the cost of an extra Promise
  // allocation.
  PromisePrototypeThen(promise, () => {}, () => {});
}

// Default no-op algorithms used when an underlying source/sink omits a
// callback. The async ones resolve with undefined; nonOpStart is sync
// because start's return value is awaited separately by the caller.
async function nonOpFlush() {}

function nonOpStart() {}

async function nonOpPull() {}

async function nonOpCancel() {}

async function nonOpWrite() {}

// Lazily loads the transfer module to avoid a circular dependency at
// require time.
let transfer;
function lazyTransfer() {
  if (transfer === undefined)
    transfer = require('internal/webstreams/transfer');
  return transfer;
}

module.exports = {
  ArrayBufferViewGetBuffer,
  ArrayBufferViewGetByteLength,
  ArrayBufferViewGetByteOffset,
  ArrayBufferGetByteLength,
  AsyncIterator,
  copyArrayBuffer,
  customInspect,
  dequeueValue,
  ensureIsPromise,
  enqueueValueWithSize,
  extractHighWaterMark,
  extractSizeAlgorithm,
  lazyTransfer,
  isBrandCheck,
  isPromisePending,
  peekQueueValue,
  resetQueue,
  setPromiseHandled,
  transferArrayBuffer,
  nonOpCancel,
  nonOpFlush,
  nonOpPull,
  nonOpStart,
  nonOpWrite,
  kType,
  kState,
};
diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js
new file mode 100644
index 00000000000000..793ae9e9ad8fb4
--- /dev/null
+++ b/lib/internal/webstreams/writablestream.js
@@ -0,0 +1,1329 @@
'use strict';

/* eslint-disable no-use-before-define */

// WHATWG WritableStream implementation. The functions below named
// writableStream* mirror the abstract operations of the same name in the
// WHATWG Streams Standard; deferred promises are stored as
// { promise, resolve, reject } records throughout.

const {
  ArrayPrototypePush,
  ArrayPrototypeShift,
  FunctionPrototypeBind,
  FunctionPrototypeCall,
  ObjectDefineProperties,
  PromisePrototypeThen,
  PromiseResolve,
  PromiseReject,
  ReflectConstruct,
  Symbol,
  SymbolToStringTag,
} = primordials;

const {
  codes: {
    ERR_ILLEGAL_CONSTRUCTOR,
    ERR_INVALID_ARG_VALUE,
    ERR_INVALID_ARG_TYPE,
    ERR_INVALID_STATE,
    ERR_INVALID_THIS,
  },
} = require('internal/errors');

const {
  DOMException,
} = internalBinding('messaging');

const {
  createDeferredPromise,
  customInspectSymbol: kInspect,
} = require('internal/util');

const {
  MessageChannel,
} = require('internal/worker/io');

const {
  kDeserialize,
  kTransfer,
  kTransferList,
  makeTransferable,
} = require('internal/worker/js_transferable');

const {
  customInspect,
  dequeueValue,
  ensureIsPromise,
  enqueueValueWithSize,
  extractHighWaterMark,
  extractSizeAlgorithm,
  lazyTransfer,
  isBrandCheck,
  isPromisePending,
  peekQueueValue,
  resetQueue,
  setPromiseHandled,
  nonOpCancel,
  nonOpStart,
  nonOpWrite,
  kType,
  kState,
} = require('internal/webstreams/util');

const {
  AbortController,
} = require('internal/abort_controller');

const assert = require('internal/assert');

// Internal method keys on WritableStreamDefaultController.
const kAbort = Symbol('kAbort');
// Sentinel enqueued to mark a close request in the controller queue.
const kCloseSentinel = Symbol('kCloseSentinel');
const kError = Symbol('kError');

/**
 * @typedef {import('../abort_controller').AbortSignal} AbortSignal
 * @typedef {import('./queuingstrategies').QueuingStrategy
 * } QueuingStrategy
 * @typedef {import('./queuingstrategies').QueuingStrategySize
 * } QueuingStrategySize
 *
 * @callback UnderlyingSinkStartCallback
 * @param {WritableStreamDefaultController} controller
 *
 * @callback UnderlyingSinkWriteCallback
 * @param {any} chunk
 * @param {WritableStreamDefaultController} controller
 * @returns {Promise}
 *
 * @callback UnderlyingSinkCloseCallback
 * @returns {Promise}
 *
 * @callback UnderlyingSinkAbortCallback
 * @param {any} reason
 * @returns {Promise}
 *
 * @typedef {{
 *   start? : UnderlyingSinkStartCallback,
 *   write? : UnderlyingSinkWriteCallback,
 *   close? : UnderlyingSinkCloseCallback,
 *   abort? : UnderlyingSinkAbortCallback,
 *   type? : any,
 * }} UnderlyingSink
 */

class WritableStream {
  [kType] = 'WritableStream';

  get [SymbolToStringTag]() { return this[kType]; }

  /**
   * @param {UnderlyingSink} [sink]
   * @param {QueuingStrategy} [strategy]
   */
  constructor(sink = null, strategy = {}) {
    // sink.type is reserved by the spec and must not be set.
    const type = sink?.type;
    if (type !== undefined)
      throw new ERR_INVALID_ARG_VALUE.RangeError('type', type);

    this[kState] = {
      close: createDeferredPromise(),
      closeRequest: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      },
      inFlightWriteRequest: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      },
      inFlightCloseRequest: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      },
      pendingAbortRequest: {
        abort: {
          promise: undefined,
          resolve: undefined,
          reject: undefined,
        },
        reason: undefined,
        wasAlreadyErroring: false,
      },
      backpressure: false,
      controller: undefined,
      state: 'writable',
      storedError: undefined,
      writeRequests: [],
      writer: undefined,
      transfer: {
        readable: undefined,
        port1: undefined,
        port2: undefined,
        promise: undefined,
      }
    };

    const size = extractSizeAlgorithm(strategy?.size);
    const highWaterMark = extractHighWaterMark(strategy?.highWaterMark, 1);

    setupWritableStreamDefaultControllerFromSink(
      this,
      sink,
      highWaterMark,
      size);

    // eslint-disable-next-line no-constructor-return
    return makeTransferable(this);
  }

  /**
   * @readonly
   * @type {boolean}
   */
  get locked() {
    if (!isWritableStream(this))
      throw new ERR_INVALID_THIS('WritableStream');
    return isWritableStreamLocked(this);
  }

  /**
   * @param {any} reason
   * @returns {Promise<void>}
   */
  abort(reason = undefined) {
    if (!isWritableStream(this))
      return PromiseReject(new ERR_INVALID_THIS('WritableStream'));
    if (isWritableStreamLocked(this)) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError('WritableStream is locked'));
    }
    return writableStreamAbort(this, reason);
  }

  /**
   * @returns {Promise<void>}
   */
  close() {
    if (!isWritableStream(this))
      return PromiseReject(new ERR_INVALID_THIS('WritableStream'));
    if (isWritableStreamLocked(this)) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError('WritableStream is locked'));
    }
    if (writableStreamCloseQueuedOrInFlight(this)) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError('Failure closing WritableStream'));
    }
    return writableStreamClose(this);
  }

  /**
   * Acquires a writer, locking the stream.
   * @returns {WritableStreamDefaultWriter}
   */
  getWriter() {
    if (!isWritableStream(this))
      throw new ERR_INVALID_THIS('WritableStream');
    return new WritableStreamDefaultWriter(this);
  }

  [kInspect](depth, options) {
    return customInspect(depth, options, this[kType], {
      locked: this.locked,
      state: this[kState].state,
    });
  }

  // Serializer hook for postMessage transfer: moves the stream across
  // realms over a MessagePort. A locked stream cannot be transferred.
  [kTransfer]() {
    if (!isWritableStream(this))
      throw new ERR_INVALID_THIS('WritableStream');
    if (this.locked) {
      this[kState].transfer.port1?.close();
      this[kState].transfer.port1 = undefined;
      this[kState].transfer.port2 = undefined;
      throw new DOMException(
        'Cannot transfer a locked WritableStream',
        'DataCloneError');
    }

    const {
      readable,
      promise,
    } = lazyTransfer().newCrossRealmReadableStream(
      this,
      this[kState].transfer.port1);

    this[kState].transfer.readable = readable;
    this[kState].transfer.promise = promise;

    setPromiseHandled(this[kState].transfer.promise);

    return {
      data: { port: this[kState].transfer.port2 },
      deserializeInfo:
        'internal/webstreams/writablestream:TransferedWritableStream'
    };
  }

  [kTransferList]() {
    const { port1, port2 } = new MessageChannel();
    this[kState].transfer.port1 = port1;
    this[kState].transfer.port2 = port2;
    return [ port2 ];
  }

  // Deserializer hook: reconnects a transferred stream to the sending
  // realm through the received MessagePort.
  [kDeserialize]({ port }) {
    const transfer = lazyTransfer();
    setupWritableStreamDefaultControllerFromSink(
      this,
      new transfer.CrossRealmTransformWritableSink(port),
      1,
      () => 1);
  }
}

ObjectDefineProperties(WritableStream.prototype, {
  locked: { enumerable: true },
  abort: { enumerable: true },
  close: { enumerable: true },
  getWriter: { enumerable: true },
});

// Constructs the receiving-side shell of a transferred WritableStream,
// bypassing the public constructor (state is filled in by kDeserialize).
function TransferedWritableStream() {
  return makeTransferable(ReflectConstruct(
    function() {
      this[kType] = 'WritableStream';
      this[kState] = {
        close: createDeferredPromise(),
        closeRequest: {
          promise: undefined,
          resolve: undefined,
          reject: undefined,
        },
        inFlightWriteRequest: {
          promise: undefined,
          resolve: undefined,
          reject: undefined,
        },
        inFlightCloseRequest: {
          promise: undefined,
          resolve: undefined,
          reject: undefined,
        },
        pendingAbortRequest: {
          abort: {
            promise: undefined,
            resolve: undefined,
            reject: undefined,
          },
          reason: undefined,
          wasAlreadyErroring: false,
        },
        backpressure: false,
        controller: undefined,
        state: 'writable',
        storedError: undefined,
        writeRequests: [],
        writer: undefined,
        transfer: {
          promise: undefined,
          port1: undefined,
          port2: undefined,
          readable: undefined,
        },
      };
    },
    [], WritableStream));
}
TransferedWritableStream.prototype[kDeserialize] = () => {};

class WritableStreamDefaultWriter {
  [kType] = 'WritableStreamDefaultWriter';

  get [SymbolToStringTag]() { return this[kType]; }

  /**
   * @param {WritableStream} stream
   */
  constructor(stream) {
    if (!isWritableStream(stream))
      throw new ERR_INVALID_ARG_TYPE('stream', 'WritableStream', stream);
    this[kState] = {
      stream: undefined,
      close: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      },
      ready: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      }
    };
    setupWritableStreamDefaultWriter(this, stream);
  }

  /**
   * Resolves when the stream closes; rejects if it errors.
   * @readonly
   * @type {Promise<void>}
   */
  get closed() {
    if (!isWritableStreamDefaultWriter(this))
      return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter'));
    return this[kState].close.promise;
  }

  /**
   * Remaining queue capacity (highWaterMark - queueTotalSize); null while
   * errored/erroring, 0 once closed.
   * @readonly
   * @type {number}
   */
  get desiredSize() {
    if (!isWritableStreamDefaultWriter(this))
      throw new ERR_INVALID_THIS('WritableStreamDefaultWriter');
    if (this[kState].stream === undefined) {
      throw new ERR_INVALID_STATE.TypeError(
        'Writer is not bound to a WritableStream');
    }
    return writableStreamDefaultWriterGetDesiredSize(this);
  }

  /**
   * Resolves when backpressure clears and writing may proceed.
   * @readonly
   * @type {Promise<void>}
   */
  get ready() {
    if (!isWritableStreamDefaultWriter(this))
      return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter'));
    return this[kState].ready.promise;
  }

  /**
   * @param {any} reason
   * @returns {Promise<void>}
   */
  abort(reason = undefined) {
    if (!isWritableStreamDefaultWriter(this))
      return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter'));
    if (this[kState].stream === undefined) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError(
          'Writer is not bound to a WritableStream'));
    }
    return writableStreamDefaultWriterAbort(this, reason);
  }

  /**
   * @returns {Promise<void>}
   */
  close() {
    if (!isWritableStreamDefaultWriter(this))
      return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter'));
    const {
      stream,
    } = this[kState];
    if (stream === undefined) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError(
          'Writer is not bound to a WritableStream'));
    }
    if (writableStreamCloseQueuedOrInFlight(stream)) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError('Failure to close WritableStream'));
    }
    return writableStreamDefaultWriterClose(this);
  }

  // Releases this writer's lock on the stream. A no-op when the writer is
  // already detached.
  releaseLock() {
    if (!isWritableStreamDefaultWriter(this))
      throw new ERR_INVALID_THIS('WritableStreamDefaultWriter');
    const {
      stream,
    } = this[kState];
    if (stream === undefined)
      return;
    assert(stream[kState].writer !== undefined);
    writableStreamDefaultWriterRelease(this);
  }

  /**
   * @param {any} chunk
   * @returns {Promise<void>}
   */
  write(chunk = undefined) {
    if (!isWritableStreamDefaultWriter(this))
      return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter'));
    if (this[kState].stream === undefined) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError(
          'Writer is not bound to a WritableStream'));
    }
    return writableStreamDefaultWriterWrite(this, chunk);
  }

  [kInspect](depth, options) {
    return customInspect(depth, options, this[kType], {
      stream: this[kState].stream,
      close: this[kState].close.promise,
      ready: this[kState].ready.promise,
      desiredSize: this.desiredSize,
    });
  }
}

ObjectDefineProperties(WritableStreamDefaultWriter.prototype, {
  closed: { enumerable: true },
  ready: { enumerable: true },
  desiredSize: { enumerable: true },
  abort: { enumerable: true },
  close: { enumerable: true },
  releaseLock: { enumerable: true },
  write: { enumerable: true },
});

class WritableStreamDefaultController {
  [kType] = 'WritableStreamDefaultController';

  get [SymbolToStringTag]() { return this[kType]; }

  // Not user-constructible; instances are created internally via
  // createWritableStreamDefaultController().
  constructor() {
    throw new ERR_ILLEGAL_CONSTRUCTOR();
  }

  // Internal: runs the sink's abort algorithm, then drops all algorithm
  // references so they can be GC'd.
  [kAbort](reason) {
    const result = this[kState].abortAlgorithm(reason);
    writableStreamDefaultControllerClearAlgorithms(this);
    return result;
  }

  // Internal: discards any queued chunks when the stream errors.
  [kError]() {
    resetQueue(this);
  }

  /**
   * @type {any}
   */
  get abortReason() {
    if (!isWritableStreamDefaultController(this))
      throw new ERR_INVALID_THIS('WritableStreamDefaultController');
    return this[kState].abortReason;
  }

  /**
   * Signal fired when the stream is aborted, for use by the sink.
   * @type {AbortSignal}
   */
  get signal() {
    if (!isWritableStreamDefaultController(this))
      throw new ERR_INVALID_THIS('WritableStreamDefaultController');
    return this[kState].abortController.signal;
  }

  /**
   * @param {any} error
   */
  error(error = undefined) {
    if (!isWritableStreamDefaultController(this))
      throw new ERR_INVALID_THIS('WritableStreamDefaultController');
    if (this[kState].stream[kState].state !== 'writable')
      return;
    writableStreamDefaultControllerError(this, error);
  }

  [kInspect](depth, options) {
    return customInspect(depth, options, this[kType], {
      stream: this[kState].stream,
    });
  }
}

ObjectDefineProperties(WritableStreamDefaultController.prototype, {
  abortReason: { enumerable: true },
  signal: { enumerable: true },
  error: { enumerable: true },
});

// Internal factory that bypasses the throwing public constructor.
function createWritableStreamDefaultController() {
  return ReflectConstruct(
    function() {
      this[kType] = 'WritableStreamDefaultController';
    },
    [], WritableStreamDefaultController);
}

const isWritableStream =
  isBrandCheck('WritableStream');
const isWritableStreamDefaultWriter =
  isBrandCheck('WritableStreamDefaultWriter');
const isWritableStreamDefaultController =
  isBrandCheck('WritableStreamDefaultController');

function isWritableStreamLocked(stream) {
  return stream[kState].writer !== undefined;
}

// Binds a new writer to the stream, initializing the writer's ready and
// close promise records according to the stream's current state
// ('writable' | 'erroring' | 'closed' | default: 'errored').
function setupWritableStreamDefaultWriter(writer, stream) {
  if (isWritableStreamLocked(stream))
    throw new ERR_INVALID_STATE.TypeError('WritableStream is locked');
  writer[kState].stream = stream;
  stream[kState].writer = writer;
  switch (stream[kState].state) {
    case 'writable':
      if (!writableStreamCloseQueuedOrInFlight(stream) &&
          stream[kState].backpressure) {
        // Under backpressure the ready promise stays pending.
        writer[kState].ready = createDeferredPromise();
      } else {
        writer[kState].ready = {
          promise: PromiseResolve(),
          resolve: undefined,
          reject: undefined,
        };
      }
      setClosedPromiseToNewPromise();
      break;
    case 'erroring':
      writer[kState].ready = {
        promise: PromiseReject(stream[kState].storedError),
        resolve: undefined,
        reject: undefined,
      };
      setPromiseHandled(writer[kState].ready.promise);
      setClosedPromiseToNewPromise();
      break;
    case 'closed':
      writer[kState].ready = {
        promise: PromiseResolve(),
        resolve: undefined,
        reject: undefined,
      };
      writer[kState].close = {
        promise: PromiseResolve(),
        resolve: undefined,
        reject: undefined,
      };
      break;
    default:
      // 'errored': both promises reject with the stored error.
      writer[kState].ready = {
        promise: PromiseReject(stream[kState].storedError),
        resolve: undefined,
        reject: undefined,
      };
      writer[kState].close = {
        promise: PromiseReject(stream[kState].storedError),
        resolve: undefined,
        reject: undefined,
      };
      setPromiseHandled(writer[kState].ready.promise);
      setPromiseHandled(writer[kState].close.promise);
  }

  function setClosedPromiseToNewPromise() {
    writer[kState].close = createDeferredPromise();
  }
}

// WritableStreamAbort: records the abort reason, fires the abort signal,
// and starts (or joins) the erroring process. Returns a promise settling
// when the abort completes.
function writableStreamAbort(stream, reason) {
  const {
    state,
    controller,
  } = stream[kState];
  if (state === 'closed' || state === 'errored')
    return PromiseResolve();

  controller[kState].abortReason = reason;
  controller[kState].abortController.abort();

  // If an abort is already pending, piggy-back on its promise.
  if (stream[kState].pendingAbortRequest.abort.promise !== undefined)
    return stream[kState].pendingAbortRequest.abort.promise;

  assert(state === 'writable' || state === 'erroring');

  let wasAlreadyErroring = false;
  if (state === 'erroring') {
    wasAlreadyErroring = true;
    // The original erroring reason wins; this abort's reason is dropped.
    reason = undefined;
  }

  const abort = createDeferredPromise();

  stream[kState].pendingAbortRequest = {
    abort,
    reason,
    wasAlreadyErroring,
  };

  if (!wasAlreadyErroring)
    writableStreamStartErroring(stream, reason);

  return abort.promise;
}

// WritableStreamClose: queues a close request (via the kCloseSentinel) and
// returns a promise that settles when the sink's close completes.
function writableStreamClose(stream) {
  const {
    state,
    writer,
    backpressure,
    controller,
  } = stream[kState];
  if (state === 'closed' || state === 'errored') {
    return PromiseReject(
      new ERR_INVALID_STATE.TypeError('WritableStream is closed'));
  }
  assert(state === 'writable' || state === 'erroring');
  assert(!writableStreamCloseQueuedOrInFlight(stream));
  stream[kState].closeRequest = createDeferredPromise();
  const { promise } = stream[kState].closeRequest;
  // Closing releases any writer waiting on backpressure.
  if (writer !== undefined && backpressure && state === 'writable')
    writer[kState].ready.resolve?.();
  writableStreamDefaultControllerClose(controller);
  return promise;
}

// Flips the backpressure flag, creating a fresh pending ready promise when
// backpressure begins and resolving it when backpressure clears.
function writableStreamUpdateBackpressure(stream, backpressure) {
  assert(stream[kState].state === 'writable');
  assert(!writableStreamCloseQueuedOrInFlight(stream));
  const {
    writer,
  } = stream[kState];
  if (writer !== undefined && stream[kState].backpressure !== backpressure) {
    if (backpressure) {
      writer[kState].ready = createDeferredPromise();
    } else {
      writer[kState].ready.resolve?.();
    }
  }
  stream[kState].backpressure = backpressure;
}

// Transitions 'writable' -> 'erroring'. Finalizes immediately to 'errored'
// only when nothing is in flight and the controller has started.
function writableStreamStartErroring(stream, reason) {
  assert(stream[kState].storedError === undefined);
  assert(stream[kState].state === 'writable');
  const {
    controller,
    writer,
  } = stream[kState];
  assert(controller !== undefined);
  stream[kState].state = 'erroring';
  stream[kState].storedError = reason;
  if (writer !== undefined) {
    writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);
  }
  if (!writableStreamHasOperationMarkedInFlight(stream) &&
      controller[kState].started) {
    writableStreamFinishErroring(stream);
  }
}

// Once errored, rejects any outstanding close request and the writer's
// closed promise with the stored error.
function writableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {
  assert(stream[kState].state === 'errored');
  if (stream[kState].closeRequest.promise !== undefined) {
    assert(stream[kState].inFlightCloseRequest.promise === undefined);
    stream[kState].closeRequest.reject?.(stream[kState].storedError);
    stream[kState].closeRequest = {
      promise: undefined,
      reject: undefined,
      resolve: undefined,
    };
  }
  const {
    writer,
  } = stream[kState];
  if (writer !== undefined) {
    writer[kState].close.reject?.(stream[kState].storedError);
    setPromiseHandled(writer[kState].close.promise);
  }
}

// Moves the oldest queued write request into the in-flight slot.
function writableStreamMarkFirstWriteRequestInFlight(stream) {
  assert(stream[kState].inFlightWriteRequest.promise === undefined);
  assert(stream[kState].writeRequests.length);
  const writeRequest = ArrayPrototypeShift(stream[kState].writeRequests);
  stream[kState].inFlightWriteRequest = writeRequest;
}

// Moves the pending close request into the in-flight slot.
function writableStreamMarkCloseRequestInFlight(stream) {
  assert(stream[kState].inFlightWriteRequest.promise === undefined);
  assert(stream[kState].closeRequest.promise !== undefined);
  stream[kState].inFlightCloseRequest = stream[kState].closeRequest;
  stream[kState].closeRequest = {
    promise: undefined,
    resolve: undefined,
    reject: undefined,
  };
}

function writableStreamHasOperationMarkedInFlight(stream) {
  const {
    inFlightWriteRequest,
    inFlightCloseRequest,
  } = stream[kState];
  if (inFlightWriteRequest.promise === undefined &&
      inFlightCloseRequest.promise === undefined) {
    return false;
  }
  return true;
}

// Rejects the in-flight write request, clears the slot, and pushes the
// stream towards the errored state.
function writableStreamFinishInFlightWriteWithError(stream, error) {
  assert(stream[kState].inFlightWriteRequest.promise !== undefined);
  stream[kState].inFlightWriteRequest.reject?.(error);
  stream[kState].inFlightWriteRequest = {
    promise: undefined,
    resolve: undefined,
    reject: undefined,
  };
  assert(stream[kState].state === 'writable' ||
         stream[kState].state === 'erroring');
  writableStreamDealWithRejection(stream, error);
}

// Resolves the in-flight write request and clears the slot.
function writableStreamFinishInFlightWrite(stream) {
  assert(stream[kState].inFlightWriteRequest.promise !== undefined);
  stream[kState].inFlightWriteRequest.resolve?.();
  stream[kState].inFlightWriteRequest = {
    promise: undefined,
    resolve: undefined,
    reject: undefined,
  };
}

// Rejects the in-flight close request (and any pending abort request with
// the same error), then pushes the stream towards the errored state.
function writableStreamFinishInFlightCloseWithError(stream, error) {
  assert(stream[kState].inFlightCloseRequest.promise !== undefined);
  stream[kState].inFlightCloseRequest.reject?.(error);
  stream[kState].inFlightCloseRequest = {
    promise: undefined,
    resolve: undefined,
    reject: undefined,
  };
  assert(stream[kState].state === 'writable' ||
         stream[kState].state === 'erroring');
  if (stream[kState].pendingAbortRequest.abort.promise !== undefined) {
    stream[kState].pendingAbortRequest.abort.reject?.(error);
    stream[kState].pendingAbortRequest = {
      abort: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      },
      reason: undefined,
      wasAlreadyErroring: false,
    };
  }
  writableStreamDealWithRejection(stream, error);
}

// Completes a successful close: settles the in-flight close request, any
// pending abort (a close racing an abort counts as success), and the
// writer's closed promise, ending in state 'closed'.
function writableStreamFinishInFlightClose(stream) {
  assert(stream[kState].inFlightCloseRequest.promise !== undefined);
  stream[kState].inFlightCloseRequest.resolve?.();
  stream[kState].inFlightCloseRequest = {
    promise: undefined,
    resolve: undefined,
    reject: undefined,
  };
  if (stream[kState].state === 'erroring') {
    stream[kState].storedError = undefined;
    if (stream[kState].pendingAbortRequest.abort.promise !== undefined) {
      stream[kState].pendingAbortRequest.abort.resolve?.();
      stream[kState].pendingAbortRequest = {
        abort: {
          promise: undefined,
          resolve: undefined,
          reject: undefined,
        },
        reason: undefined,
        wasAlreadyErroring: false,
      };
    }
  }
  stream[kState].state = 'closed';
  if (stream[kState].writer !== undefined)
    stream[kState].writer[kState].close.resolve?.();
  assert(stream[kState].pendingAbortRequest.abort.promise === undefined);
  assert(stream[kState].storedError === undefined);
}

// Finalizes the 'erroring' -> 'errored' transition: rejects all queued
// write requests, then either runs the sink's abort algorithm (if an
// abort is pending) or just rejects the close/closed promises.
function writableStreamFinishErroring(stream) {
  assert(stream[kState].state === 'erroring');
  assert(!writableStreamHasOperationMarkedInFlight(stream));
  stream[kState].state = 'errored';
  stream[kState].controller[kError]();
  const storedError = stream[kState].storedError;
  for (let n = 0; n < stream[kState].writeRequests.length; n++)
    stream[kState].writeRequests[n].reject?.(storedError);
  stream[kState].writeRequests = [];

  if (stream[kState].pendingAbortRequest.abort.promise === undefined) {
    writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
    return;
  }

  const abortRequest = stream[kState].pendingAbortRequest;
  stream[kState].pendingAbortRequest = {
    abort: {
      promise: undefined,
      resolve: undefined,
      reject: undefined,
    },
    reason: undefined,
    wasAlreadyErroring: false,
  };
  if (abortRequest.wasAlreadyErroring) {
    // abort() was called while already erroring: the sink's abort
    // algorithm is skipped and the abort promise rejects.
    abortRequest.abort.reject?.(storedError);
    writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
    return;
  }
  PromisePrototypeThen(
    ensureIsPromise(
      stream[kState].controller[kAbort],
      stream[kState].controller,
      abortRequest.reason),
    () => {
      abortRequest.abort.resolve?.();
      writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
    },
    (error) => {
      abortRequest.abort.reject?.(error);
      writableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
    });
}

// Routes an error to the correct transition depending on current state.
function writableStreamDealWithRejection(stream, error) {
  const {
    state,
  } = stream[kState];
  if (state === 'writable') {
    writableStreamStartErroring(stream, error);
    return;
  }

  assert(state === 'erroring');
  writableStreamFinishErroring(stream);
}

function writableStreamCloseQueuedOrInFlight(stream) {
  if (stream[kState].closeRequest.promise === undefined &&
      stream[kState].inFlightCloseRequest.promise === undefined) {
    return false;
  }
  return true;
}

// Creates and queues a deferred write request record; its promise is what
// writer.write() returns to the caller.
function writableStreamAddWriteRequest(stream) {
  assert(isWritableStreamLocked(stream));
  assert(stream[kState].state === 'writable');
  const {
    promise,
    resolve,
    reject,
  } = createDeferredPromise();
  ArrayPrototypePush(
    stream[kState].writeRequests,
    {
      promise,
      resolve,
      reject,
    });
  return promise;
}

// WritableStreamDefaultWriterWrite. The chunk size is computed first (the
// size algorithm may have side effects, including erroring the stream),
// then the writer/stream association and state are re-validated before
// the chunk is enqueued.
function writableStreamDefaultWriterWrite(writer, chunk) {
  const {
    stream,
  } = writer[kState];
  assert(stream !== undefined);
  const {
    controller,
  } = stream[kState];
  const chunkSize = writableStreamDefaultControllerGetChunkSize(
    controller,
    chunk);
  if (stream !== writer[kState].stream) {
    return PromiseReject(
      new ERR_INVALID_STATE.TypeError('Mismatched WritableStreams'));
  }
  const {
    state,
  } = stream[kState];

  if (state === 'errored')
    return PromiseReject(stream[kState].storedError);

  if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {
    return PromiseReject(
      new ERR_INVALID_STATE.TypeError('WritableStream is closed'));
  }

  if (state === 'erroring')
    return PromiseReject(stream[kState].storedError);

  assert(state === 'writable');

  const promise = writableStreamAddWriteRequest(stream);
  writableStreamDefaultControllerWrite(controller, chunk, chunkSize);
  return promise;
}

// Detaches the writer from its stream, rejecting its ready and closed
// promises with a "released" error.
function writableStreamDefaultWriterRelease(writer) {
  const {
    stream,
  } = writer[kState];
  assert(stream !== undefined);
  assert(stream[kState].writer === writer);
  const releasedError =
    new ERR_INVALID_STATE.TypeError('Writer has been released');
  writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);
  writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);
  stream[kState].writer = undefined;
  writer[kState].stream = undefined;
}

// null while errored/erroring, 0 once closed, otherwise remaining
// capacity from the controller.
function writableStreamDefaultWriterGetDesiredSize(writer) {
  const {
    stream,
  } = writer[kState];
  switch (stream[kState].state) {
    case 'errored':
      // Fall through
    case 'erroring':
      return null;
    case 'closed':
      return 0;
  }
  return writableStreamDefaultControllerGetDesiredSize(
    stream[kState].controller);
}

// Rejects the writer's ready promise: in place if still pending, otherwise
// replaced with a fresh rejected promise. Always marked handled to avoid
// unhandled-rejection noise.
function writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) {
  if (isPromisePending(writer[kState].ready.promise)) {
    writer[kState].ready.reject?.(error);
  } else {
    writer[kState].ready = {
      promise: PromiseReject(error),
      resolve: undefined,
      reject: undefined,
    };
  }
  setPromiseHandled(writer[kState].ready.promise);
}

// Same pattern as above, for the writer's closed promise.
function writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) {
  if (isPromisePending(writer[kState].close.promise)) {
    writer[kState].close.reject?.(error);
  } else {
    writer[kState].close = {
      promise: PromiseReject(error),
      resolve: undefined,
      reject: undefined,
    };
  }
  setPromiseHandled(writer[kState].close.promise);
}

// Close that tolerates an already-closed/closing stream (used by pipe
// operations): resolves if close is already underway or done.
function writableStreamDefaultWriterCloseWithErrorPropagation(writer) {
  const {
    stream,
  } = writer[kState];
  assert(stream !== undefined);
  const {
    state,
  } = stream[kState];
  if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed')
    return PromiseResolve();

  if (state === 'errored')
    return PromiseReject(stream[kState].storedError);

  assert(state === 'writable' || state === 'erroring');

  return writableStreamDefaultWriterClose(writer);
}

function writableStreamDefaultWriterClose(writer) {
  const {
    stream,
  } = writer[kState];
  assert(stream !== undefined);
  return writableStreamClose(stream);
}

function writableStreamDefaultWriterAbort(writer, reason) {
  const {
    stream,
  } = writer[kState];
  assert(stream !== undefined);
  return writableStreamAbort(stream, reason);
}

// Enqueues a chunk on the controller, updating backpressure and kicking
// the queue processor. An enqueue failure (bad size) errors the stream.
function writableStreamDefaultControllerWrite(controller, chunk, chunkSize) {
  try {
    enqueueValueWithSize(controller, chunk, chunkSize);
  } catch (error) {
    writableStreamDefaultControllerErrorIfNeeded(controller, error);
    return;
  }
  const {
    stream,
  } = controller[kState];
  if (!writableStreamCloseQueuedOrInFlight(stream) &&
      stream[kState].state === 'writable') {
    writableStreamUpdateBackpressure(
      stream,
      writableStreamDefaultControllerGetBackpressure(controller));
  }
  writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
}

// Runs the sink's write algorithm for the front-of-queue chunk. On
// success the chunk is dequeued, backpressure re-evaluated and the next
// queued operation started; on failure the stream errors.
function writableStreamDefaultControllerProcessWrite(controller, chunk) {
  const {
    stream,
    writeAlgorithm,
  } = controller[kState];
  writableStreamMarkFirstWriteRequestInFlight(stream);

  PromisePrototypeThen(
    ensureIsPromise(writeAlgorithm, controller, chunk, controller),
    () => {
      writableStreamFinishInFlightWrite(stream);
      const {
        state,
      } = stream[kState];
      assert(state === 'writable' || state === 'erroring');
      dequeueValue(controller);
      if (!writableStreamCloseQueuedOrInFlight(stream) &&
          state === 'writable') {
        writableStreamUpdateBackpressure(
          stream,
          writableStreamDefaultControllerGetBackpressure(controller));
      }
      writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
    },
    (error) => {
      if (stream[kState].state === 'writable')
        writableStreamDefaultControllerClearAlgorithms(controller);
      writableStreamFinishInFlightWriteWithError(stream, error);
    });

}

// Runs the sink's close algorithm once the kCloseSentinel reaches the
// front of the queue (which must then be empty).
function writableStreamDefaultControllerProcessClose(controller) {
  const {
    closeAlgorithm,
    queue,
    stream,
  } = controller[kState];
  writableStreamMarkCloseRequestInFlight(stream);
  dequeueValue(controller);
  assert(!queue.length);
  const sinkClosePromise = ensureIsPromise(closeAlgorithm, controller);
  writableStreamDefaultControllerClearAlgorithms(controller);
  PromisePrototypeThen(
    sinkClosePromise,
    () => writableStreamFinishInFlightClose(stream),
    (error) => writableStreamFinishInFlightCloseWithError(stream, error));
}

function writableStreamDefaultControllerGetDesiredSize(controller) {
  const {
    highWaterMark,
    queueTotalSize,
  } = controller[kState];
  return highWaterMark - queueTotalSize;
}

// Invokes the strategy size algorithm for a chunk. If it throws, the
// stream is errored and a fallback size of 1 is returned so the caller's
// subsequent state checks reject the write cleanly.
function writableStreamDefaultControllerGetChunkSize(controller, chunk) {
  try {
    return FunctionPrototypeCall(
      controller[kState].sizeAlgorithm,
      undefined,
      chunk);
  } catch (error) {
    writableStreamDefaultControllerErrorIfNeeded(controller, error);
    return 1;
  }
}

// Errors the stream only if it is still writable (no-op otherwise).
function writableStreamDefaultControllerErrorIfNeeded(controller, error) {
  const {
    stream,
  } = controller[kState];
  if (stream[kState].state === 'writable')
    writableStreamDefaultControllerError(controller, error);
}

function writableStreamDefaultControllerError(controller, error) {
  const {
    stream,
  } = controller[kState];
  assert(stream[kState].state === 'writable');
  writableStreamDefaultControllerClearAlgorithms(controller);
  writableStreamStartErroring(stream, error);
}

// Queues the close sentinel (size 0) behind any pending writes.
function writableStreamDefaultControllerClose(controller) {
  enqueueValueWithSize(controller, kCloseSentinel, 0);
  writableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
}

// Drops algorithm references so sink/strategy closures can be GC'd once
// they can no longer be called.
function writableStreamDefaultControllerClearAlgorithms(controller) {
  controller[kState].writeAlgorithm = undefined;
  controller[kState].closeAlgorithm = undefined;
  controller[kState].abortAlgorithm = undefined;
  controller[kState].sizeAlgorithm = undefined;
}

// Backpressure applies when the queue is at or beyond the high-water mark.
function writableStreamDefaultControllerGetBackpressure(controller) {
  return writableStreamDefaultControllerGetDesiredSize(controller) <= 0;
}

// Queue pump: runs at most one operation at a time. Waits until the
// controller has started and no write is in flight; finalizes erroring if
// needed; otherwise processes the front of the queue (a close sentinel or
// a chunk).
function writableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {
  const {
    queue,
    started,
    stream,
  } = controller[kState];
  if (!started || stream[kState].inFlightWriteRequest.promise !== undefined)
    return;

  if (stream[kState].state === 'erroring') {
    writableStreamFinishErroring(stream);
    return;
  }

  if (!queue.length)
    return;

  const value = peekQueueValue(controller);
  if (value === kCloseSentinel)
    writableStreamDefaultControllerProcessClose(controller);
  else
    writableStreamDefaultControllerProcessWrite(controller, value);
}

// Builds a controller from a user-provided underlying sink: each sink
// callback is bound to the sink (start also receives the controller),
// with no-op fallbacks for omitted callbacks.
function setupWritableStreamDefaultControllerFromSink(
  stream,
  sink,
  highWaterMark,
  sizeAlgorithm) {
  const controller = createWritableStreamDefaultController();
  const start = sink?.start;
  const write = sink?.write;
  const close = sink?.close;
  const abort = sink?.abort;
  const startAlgorithm = start ?
    FunctionPrototypeBind(start, sink, controller) :
    nonOpStart;
  const writeAlgorithm = write ?
    FunctionPrototypeBind(write, sink) :
    nonOpWrite;
  const closeAlgorithm = close ?
    FunctionPrototypeBind(close, sink) : nonOpCancel;
  const abortAlgorithm = abort ?
+ FunctionPrototypeBind(abort, sink) : nonOpCancel; + setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(isWritableStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + abortAlgorithm, + abortReason: undefined, + closeAlgorithm, + highWaterMark, + queue: [], + queueTotalSize: 0, + abortController: new AbortController(), + sizeAlgorithm, + started: false, + stream, + writeAlgorithm, + }; + stream[kState].controller = controller; + + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (error) => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDealWithRejection(stream, error); + }); +} + +module.exports = { + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + TransferedWritableStream, + + // Exported Brand Checks + isWritableStream, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + isWritableStreamLocked, + setupWritableStreamDefaultWriter, + writableStreamAbort, + writableStreamClose, + writableStreamUpdateBackpressure, + writableStreamStartErroring, + writableStreamRejectCloseAndClosedPromiseIfNeeded, + writableStreamMarkFirstWriteRequestInFlight, + writableStreamMarkCloseRequestInFlight, + 
writableStreamHasOperationMarkedInFlight, + writableStreamFinishInFlightWriteWithError, + writableStreamFinishInFlightWrite, + writableStreamFinishInFlightCloseWithError, + writableStreamFinishInFlightClose, + writableStreamFinishErroring, + writableStreamDealWithRejection, + writableStreamCloseQueuedOrInFlight, + writableStreamAddWriteRequest, + writableStreamDefaultWriterWrite, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterGetDesiredSize, + writableStreamDefaultWriterEnsureReadyPromiseRejected, + writableStreamDefaultWriterEnsureClosedPromiseRejected, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterClose, + writableStreamDefaultWriterAbort, + writableStreamDefaultControllerWrite, + writableStreamDefaultControllerProcessWrite, + writableStreamDefaultControllerProcessClose, + writableStreamDefaultControllerGetDesiredSize, + writableStreamDefaultControllerGetChunkSize, + writableStreamDefaultControllerErrorIfNeeded, + writableStreamDefaultControllerError, + writableStreamDefaultControllerClose, + writableStreamDefaultControllerClearAlgorithms, + writableStreamDefaultControllerGetBackpressure, + writableStreamDefaultControllerAdvanceQueueIfNeeded, + setupWritableStreamDefaultControllerFromSink, + setupWritableStreamDefaultController, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/stream/web.js b/lib/stream/web.js new file mode 100644 index 00000000000000..929abd19044458 --- /dev/null +++ b/lib/stream/web.js @@ -0,0 +1,48 @@ +'use strict'; + +const { + emitExperimentalWarning, +} = require('internal/util'); + +emitExperimentalWarning('stream/web'); + +const { + TransformStream, + TransformStreamDefaultController, +} = require('internal/webstreams/transformstream'); + +const { + WritableStream, + WritableStreamDefaultController, + WritableStreamDefaultWriter, +} = require('internal/webstreams/writablestream'); + +const { + ReadableStream, + ReadableStreamDefaultReader, + 
ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, +} = require('internal/webstreams/readablestream'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('internal/webstreams/queuingstrategies'); + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransformStream, + TransformStreamDefaultController, + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/src/node_buffer.cc b/src/node_buffer.cc index e816ba131644ad..b5651b5e325fc9 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -67,6 +67,7 @@ using v8::MaybeLocal; using v8::Nothing; using v8::Number; using v8::Object; +using v8::SharedArrayBuffer; using v8::String; using v8::Uint32; using v8::Uint32Array; @@ -1158,6 +1159,60 @@ void GetZeroFillToggle(const FunctionCallbackInfo<Value>& args) { args.GetReturnValue().Set(Uint32Array::New(ab, 0, 1)); } +void DetachArrayBuffer(const FunctionCallbackInfo<Value>& args) { + Environment* env = Environment::GetCurrent(args); + if (args[0]->IsArrayBuffer()) { + Local<ArrayBuffer> buf = args[0].As<ArrayBuffer>(); + if (buf->IsDetachable()) { + std::shared_ptr<BackingStore> store = buf->GetBackingStore(); + buf->Detach(); + args.GetReturnValue().Set(ArrayBuffer::New(env->isolate(), store)); + } + } +} + +void CopyArrayBuffer(const FunctionCallbackInfo<Value>& args) { + // args[0] == Destination ArrayBuffer + // args[1] == Destination ArrayBuffer Offset + // args[2] == Source ArrayBuffer + // args[3] == Source ArrayBuffer Offset + // args[4] == bytesToCopy + + CHECK(args[0]->IsArrayBuffer() || args[0]->IsSharedArrayBuffer()); + CHECK(args[1]->IsUint32()); + CHECK(args[2]->IsArrayBuffer() || args[2]->IsSharedArrayBuffer()); + CHECK(args[3]->IsUint32()); + CHECK(args[4]->IsUint32()); + + 
std::shared_ptr<BackingStore> destination; + std::shared_ptr<BackingStore> source; + + if (args[0]->IsArrayBuffer()) { + destination = args[0].As<ArrayBuffer>()->GetBackingStore(); + } else if (args[0]->IsSharedArrayBuffer()) { + destination = args[0].As<SharedArrayBuffer>()->GetBackingStore(); + } + + if (args[2]->IsArrayBuffer()) { + source = args[2].As<ArrayBuffer>()->GetBackingStore(); + } else if (args[2]->IsSharedArrayBuffer()) { + source = args[2].As<SharedArrayBuffer>()->GetBackingStore(); + } + + uint32_t destination_offset = args[1].As<Uint32>()->Value(); + uint32_t source_offset = args[3].As<Uint32>()->Value(); + size_t bytes_to_copy = args[4].As<Uint32>()->Value(); + + CHECK_GE(destination->ByteLength() - destination_offset, bytes_to_copy); + CHECK_GE(source->ByteLength() - source_offset, bytes_to_copy); + + uint8_t* dest = + static_cast<uint8_t*>(destination->Data()) + destination_offset; + uint8_t* src = + static_cast<uint8_t*>(source->Data()) + source_offset; + memcpy(dest, src, bytes_to_copy); +} + void Initialize(Local<Object> target, Local<Value> unused, Local<Context> context, @@ -1176,6 +1231,9 @@ void Initialize(Local<Object> target, env->SetMethodNoSideEffect(target, "indexOfNumber", IndexOfNumber); env->SetMethodNoSideEffect(target, "indexOfString", IndexOfString); + env->SetMethod(target, "detachArrayBuffer", DetachArrayBuffer); + env->SetMethod(target, "copyArrayBuffer", CopyArrayBuffer); + env->SetMethod(target, "swap16", Swap16); env->SetMethod(target, "swap32", Swap32); env->SetMethod(target, "swap64", Swap64); @@ -1251,6 +1309,9 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(StringWrite); registry->Register(GetZeroFillToggle); + registry->Register(DetachArrayBuffer); + registry->Register(CopyArrayBuffer); + Blob::RegisterExternalReferences(registry); FixedSizeBlobCopyJob::RegisterExternalReferences(registry); } diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 86f2eaada97b3b..60d3aeb98c17da 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -21,7 +21,7 @@ Last update: - html/webappapis/timers: 
https://github.com/web-platform-tests/wpt/tree/5873f2d8f1/html/webappapis/timers - interfaces: https://github.com/web-platform-tests/wpt/tree/fcb671ed8b/interfaces - resources: https://github.com/web-platform-tests/wpt/tree/972ca5b669/resources -- streams: https://github.com/web-platform-tests/wpt/tree/b869e60df1/streams +- streams: https://github.com/web-platform-tests/wpt/tree/8f60d94439/streams - url: https://github.com/web-platform-tests/wpt/tree/1fcb39223d/url [Web Platform Tests]: https://github.com/web-platform-tests/wpt diff --git a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js index db8ac3a39983fd..9aa508225865c8 100644 --- a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js +++ b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js @@ -957,7 +957,8 @@ promise_test(() => { assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0201); + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0102); return reader.read(new Uint8Array(1)); }).then(result => { @@ -1138,7 +1139,7 @@ promise_test(() => { assert_equals(pullCount, 1, '1 pull() should have been made in response to partial fill by enqueue()'); assert_not_equals(byobRequest, null, 'byobRequest should not be null'); - assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() shouild be 2'); + assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() should be 2'); assert_equals(viewInfos[1].byteLength, 1, 'byteLength after enqueue() should be 1'); reader.cancel(); @@ -1326,7 +1327,9 @@ promise_test(() => { const view = result.value; assert_equals(view.byteOffset, 0); assert_equals(view.byteLength, 2); - assert_equals(view[0], 0xaaff); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + 
assert_equals(dataView.getUint16(0), 0xffaa); assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2'); @@ -1381,7 +1384,9 @@ promise_test(() => { assert_equals(view.buffer.byteLength, 4, 'buffer.byteLength'); assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0001, 'Contents are set'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0100, 'contents are set'); const p = reader.read(new Uint16Array(1)); @@ -1395,7 +1400,9 @@ promise_test(() => { assert_equals(view.buffer.byteLength, 2, 'buffer.byteLength'); assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0302, 'Contents are set'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0203, 'contents are set'); assert_not_equals(byobRequest, null, 'byobRequest must not be null'); assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js index 2dcab69f42db0d..7c0bffb78710fe 100644 --- a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js +++ b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js @@ -25,7 +25,7 @@ test(t => { const memory = new WebAssembly.Memory({ initial: 1 }); const view = new Uint8Array(memory.buffer, 0, 1); - assert_throws_js(t, TypeError, controller.enqueue(view)); + assert_throws_js(TypeError, () => controller.enqueue(view)); }, 'ReadableStream with byte source: enqueue() with a non-transferable buffer'); promise_test(async t => { @@ 
-54,5 +54,5 @@ promise_test(async t => { ); await pullCalledPromise; - assert_throws_js(t, TypeError, byobRequest.respondWithNewView(newView)); + assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView)); }, 'ReadableStream with byte source: respondWithNewView() with a non-transferable buffer'); diff --git a/test/fixtures/wpt/streams/writable-streams/aborting.any.js b/test/fixtures/wpt/streams/writable-streams/aborting.any.js index 5c053bab915700..ab154a705ed0e9 100644 --- a/test/fixtures/wpt/streams/writable-streams/aborting.any.js +++ b/test/fixtures/wpt/streams/writable-streams/aborting.any.js @@ -1376,3 +1376,111 @@ promise_test(t => { return promise_rejects_js(t, TypeError, ws.abort(), 'abort should reject') .then(() => writer.ready); }, 'abort on a locked stream should reject'); + +test(t => { + let ctrl; + const ws = new WritableStream({start(c) { ctrl = c; }}); + const e = Error('hello'); + + assert_true(ctrl.signal instanceof AbortSignal); + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + ws.abort(e); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, e); +}, 'WritableStreamDefaultController.signal'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + write() { resolve(); return new Promise(() => {}); } + }); + const writer = ws.getWriter(); + + writer.write(99); + await called; + + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + writer.abort(); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); +}, 'the abort signal is signalled synchronously - write'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + close() { resolve(); return new Promise(() => {}); } + }); + const writer = 
ws.getWriter(); + + writer.close(99); + await called; + + assert_false(ctrl.signal.aborted); + writer.abort(); + assert_true(ctrl.signal.aborted); +}, 'the abort signal is signalled synchronously - close'); + +promise_test(async t => { + let ctrl; + const ws = new WritableStream({start(c) { ctrl = c; }}); + const writer = ws.getWriter(); + + const e = TypeError(); + ctrl.error(e); + await promise_rejects_exactly(t, e, writer.closed); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on error'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async write() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.write('hello'), 'write result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on write failure'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async close() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.close(), 'close result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on close failure'); + +promise_test(async t => { + let ctrl; + const e1 = SyntaxError(); + const e2 = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + }); + + const writer = ws.getWriter(); + ctrl.signal.addEventListener('abort', () => writer.abort(e2)); + writer.abort(e1); + assert_true(ctrl.signal.aborted); + + await promise_rejects_exactly(t, e2, writer.closed, 'closed'); +}, 'recursive abort() call'); diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index 50316e8c583eb1..6567782a1d47c9 100644 --- a/test/fixtures/wpt/versions.json +++ 
b/test/fixtures/wpt/versions.json @@ -44,7 +44,7 @@ "path": "resources" }, "streams": { - "commit": "b869e60df1b8d3840e09b41c5e987c7e23f6856c", + "commit": "8f60d9443949c323522a2009518d54d5d6ab5541", "path": "streams" }, "url": { diff --git a/test/parallel/test-whatwg-readablebytestream.js b/test/parallel/test-whatwg-readablebytestream.js new file mode 100644 index 00000000000000..eb4355505053ef --- /dev/null +++ b/test/parallel/test-whatwg-readablebytestream.js @@ -0,0 +1,238 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + ReadableByteStreamController, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + open, +} = require('fs/promises'); + +const { + readFileSync, +} = require('fs'); + +const { + Buffer, +} = require('buffer'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream({ + type: 'bytes', + }); + + assert(r[kState].controller instanceof ReadableByteStreamController); + + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + const defaultReader = r.getReader(); + assert(r.locked); + assert(defaultReader instanceof ReadableStreamDefaultReader); + defaultReader.releaseLock(); + const byobReader = r.getReader({ mode: 'byob' }); + 
assert(byobReader instanceof ReadableStreamBYOBReader); +} + +class Source { + constructor() { + this.controllerClosed = false; + } + + async start(controller) { + this.file = await open(__filename); + this.controller = controller; + } + + async pull(controller) { + const byobRequest = controller.byobRequest; + assert.match(inspect(byobRequest), /ReadableStreamBYOBRequest/); + + const view = byobRequest.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + + assert.throws(() => byobRequest.respondWithNewView({}), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + byobRequest.respond(bytesRead); + + assert.throws(() => byobRequest.respond(bytesRead), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => byobRequest.respondWithNewView(view), { + code: 'ERR_INVALID_STATE', + }); + } + + get type() { return 'bytes'; } + + get autoAllocateChunkSize() { return 1024; } +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = 
readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + break; + } + + read(stream).then(common.mustCall()); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + throw error; + } + + assert.rejects(read(stream), error); +} + +{ + assert.throws(() => { + Reflect.get(ReadableStreamBYOBRequest.prototype, 'view', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => ReadableStreamBYOBRequest.prototype.respond.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + ReadableStreamBYOBRequest.prototype.respondWithNewView.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + const readable = new ReadableStream({ type: 'bytes' }); + const reader = readable.getReader({ mode: 'byob' }); + reader.releaseLock(); + reader.releaseLock(); + assert.rejects(reader.read(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + new ReadableStream({ + type: 'bytes', + start(c) { controller = c; } + }); + assert.throws(() => controller.enqueue(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + controller.close(); + assert.throws(() => controller.enqueue(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => controller.close(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + +} diff --git a/test/parallel/test-whatwg-readablestream.js b/test/parallel/test-whatwg-readablestream.js new file mode 100644 index 00000000000000..1c18efeec41963 --- 
/dev/null +++ b/test/parallel/test-whatwg-readablestream.js @@ -0,0 +1,1522 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { + isPromise, +} = require('util/types'); +const { + setImmediate: delay +} = require('timers/promises'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamDefaultController, + ReadableByteStreamController, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + WritableStream, +} = require('stream/web'); + +const { + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + readableStreamDefaultControllerEnqueue, + readableByteStreamControllerEnqueue, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableByteStreamControllerClose, + readableByteStreamControllerRespond, +} = require('internal/webstreams/readablestream'); + +const { + kState +} = require('internal/webstreams/util'); + +const { + createReadStream, + readFileSync, +} = require('fs'); +const { + Buffer, +} = require('buffer'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream(); + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + r.getReader(); + assert(r.locked); +} + +{ + const source = { + start: 
common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall((controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source, { highWaterMark: 10 }); +} + +{ + // These are silly but they should all work per spec + new ReadableStream(1); + new ReadableStream('hello'); + new ReadableStream(false); + new ReadableStream([]); + new ReadableStream(1, 1); + new ReadableStream(1, 'hello'); + new ReadableStream(1, false); + new ReadableStream(1, []); +} + +['a', {}, false].forEach((size) => { + assert.throws(() => { + new ReadableStream({}, { size }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + 
assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +[-1, NaN].forEach((highWaterMark) => { + assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +{ + new ReadableStream({}, new ByteLengthQueuingStrategy({ highWaterMark: 1 })); + new ReadableStream({}, new CountQueuingStrategy({ highWaterMark: 1 })); +} + +{ + const strategy = new ByteLengthQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 10); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 10); +} + +{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 1); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 1); +} + +{ + const r = new ReadableStream({ + async start() { + throw new Error('boom'); + } + }); + + setImmediate(() => { + assert.strictEqual(r[kState].state, 'errored'); + assert.match(r[kState].storedError?.message, /boom/); + }); +} + +{ + const data = Buffer.from('hello'); + const r = new ReadableStream({ + start(controller) { + controller.enqueue(data); + controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + let res = await reader.read(); + if (res.done) return; + const buf = Buffer.from(res.value); + assert.strictEqual(buf.toString(), data.toString()); + res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +{ + const r = new ReadableStream({ + start(controller) { + 
controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + const res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +assert.throws(() => { + new ReadableStream({ + get start() { throw new Error('boom1'); } + }, { + get size() { throw new Error('boom2'); } + }); +}, /boom2/); + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + + assert(stream.locked); + assert.strictEqual(reader[kState].stream, stream); + assert.strictEqual(stream[kState].reader, reader); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); + + assert(reader instanceof ReadableStreamDefaultReader); + + assert(isPromise(reader.closed)); + assert.strictEqual(typeof reader.cancel, 'function'); + assert.strictEqual(typeof reader.read, 'function'); + assert.strictEqual(typeof reader.releaseLock, 'function'); + + const read1 = reader.read(); + const read2 = reader.read(); + + // The stream is empty so the read will never settle. + read1.then( + common.mustNotCall(), + common.mustNotCall() + ); + + // The stream is empty so the read will never settle. 
+ read2.then( + common.mustNotCall(), + common.mustNotCall() + ); + + assert.notStrictEqual(read1, read2); + + assert.strictEqual(reader[kState].readRequests.length, 2); + + delay().then(common.mustCall()); + + assert.throws(() => reader.releaseLock(), { + code: 'ERR_INVALID_STATE', + }); + assert(stream.locked); +} + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + const closedBefore = reader.closed; + assert(stream.locked); + reader.releaseLock(); + assert(!stream.locked); + const closedAfter = reader.closed; + + assert.strictEqual(closedBefore, closedAfter); + + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + + assert.rejects(closedBefore, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(Buffer.from('hello')); + } + }); + + const reader = stream.getReader(); + + assert.rejects(stream.cancel(), { + code: 'ERR_INVALID_STATE', + }); + + reader.cancel(); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + assert(!stream.locked); + + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader().releaseLock(); + stream.getReader().releaseLock(); + stream.getReader(); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader(); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + 
const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); + assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); 
+ assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream(); + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + const reader = stream.getReader(); + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + assert.notStrictEqual(cancel1, cancel2); + assert.rejects(cancel1, error); + assert.rejects(cancel2, error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + async start(controller) { + throw error; + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + let doClose; + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + doClose = controller.close.bind(controller); + } + }); + const reader = stream.getReader(); + doClose(); + reader.read().then(common.mustCall(({ value, done }) => { + 
assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); + })); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + } + }); + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustNotCall()); + delay().then(common.mustCall()); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + async function read(stream) { + const reader = stream.getReader(); + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2), + ]).then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull() { throw error; } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(stream.locked); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + const closed1 = reader1.closed; + 
const closed2 = reader2.closed; + + assert.notStrictEqual(closed1, closed2); + + assert.rejects(closed1, error); + assert.rejects(closed2, error); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + s2.cancel(); + + async function read(stream, canceled = false) { + const reader = stream.getReader(); + if (!canceled) { + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + } + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2, true), + ]).then(common.mustCall()); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s1.cancel(error1); + s2.cancel(error2); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s2.cancel(error2); + s1.cancel(error1); +} + +{ + const error = new Error('boom1'); + + const stream = new ReadableStream({ + cancel() { + throw error; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error = new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + c.error(error); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error 
= new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + assert.rejects(reader1.closed, error); + assert.rejects(reader2.closed, error); + + assert.rejects(reader1.read(), error); + assert.rejects(reader2.read(), error); + + setImmediate(() => c.error(error)); +} + +{ + let pullCount = 0; + const stream = new ReadableStream({ + pull(controller) { + if (pullCount) + controller.enqueue(pullCount); + pullCount++; + }, + }); + + const reader = stream.getReader(); + + queueMicrotask(common.mustCall(() => { + assert.strictEqual(pullCount, 1); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 1); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 2); + assert(!done); + })); + + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, + pull: common.mustCall(), + }); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull: common.mustCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + }, + pull: common.mustNotCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + 
assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + + })); + })); +} + +{ + let res; + let promise; + let calls = 0; + const stream = new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + promise = new Promise((resolve) => res = resolve); + return promise; + } + }); + + const reader = stream.getReader(); + + (async () => { + await reader.read(); + assert.strictEqual(calls, 1); + await delay(); + assert.strictEqual(calls, 1); + res(); + await delay(); + assert.strictEqual(calls, 2); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + }, + pull: common.mustCall(4), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + }, + pull: common.mustNotCall(), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + let calls = 0; + let res; + const ready = new Promise((resolve) => res = resolve); + + new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + if (calls === 4) + res(); + } + }, { + size() { return 1; }, + highWaterMark: 4 + }); + + ready.then(common.mustCall(() => { + assert.strictEqual(calls, 4); + })); +} + +{ + const stream = new ReadableStream({ + pull: 
common.mustCall((controller) => controller.close()) + }); + + const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => controller.error(error)) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + const error = new Error('boom'); + const error2 = new Error('boom2'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => { + controller.error(error); + throw error2; + }) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + let startCalled = false; + new ReadableStream({ + start: common.mustCall((controller) => { + controller.enqueue('a'); + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + let startCalled = false; + new ReadableStream({ + start: common.mustCall((controller) => { + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + class Source { + startCalled = false; + pullCalled = false; + cancelCalled = false; + + start(controller) { + assert.strictEqual(this, source); + this.startCalled = true; + controller.enqueue('a'); + } + + pull() { + assert.strictEqual(this, source); + this.pullCalled = true; + } + + cancel() { + assert.strictEqual(this, source); + this.cancelCalled = true; + } + } + + const source = new Source(); + + const stream = new ReadableStream(source); + const reader = stream.getReader(); + + (async () => { + await reader.read(); + reader.releaseLock(); + stream.cancel(); + assert(source.startCalled); + assert(source.pullCalled); + assert(source.cancelCalled); + })().then(common.mustCall()); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) 
{ + assert.strictEqual(controller.desiredSize, 10); + controller.close(); + assert.strictEqual(controller.desiredSize, 0); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 10); + controller.error(); + assert.strictEqual(controller.desiredSize, null); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + class Foo extends ReadableStream {} + const foo = new Foo(); + foo.getReader(); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, 0); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -2); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -3); + startCalled = true; + } + }); + assert(startCalled); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const reader = stream.getReader(); + + (async () => { + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, -1); + await reader.read(); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + })().then(common.mustCall()); +} + +{ + let c; + new ReadableStream({ + start(controller) { + c = controller; + } + }); + assert(c instanceof ReadableStreamDefaultController); + assert.strictEqual(typeof c.desiredSize, 'number'); + assert.strictEqual(typeof c.enqueue, 'function'); + assert.strictEqual(typeof c.close, 'function'); + assert.strictEqual(typeof c.error, 'function'); +} + +class Source { + 
constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + const reader = stream.getReader(); + const chunks = []; + let read = await reader.read(); + while (!read.done) { + chunks.push(Buffer.from(read.value)); + read = await reader.read(); + } + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + + assert.strictEqual(stream[kState].state, 'closed'); + assert(!stream.locked); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + [1, false, ''].forEach((options) => { + assert.throws(() => stream.values(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for 
await (const _ of stream.values({ preventCancel: false })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: false })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + assert.throws(() => Reflect.get(ReadableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStream.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.getReader.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.tee.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype[kTransfer].call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamDefaultReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamDefaultReader.prototype.releaseLock.call({}); + }, { + 
code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamBYOBReader.prototype.releaseLock.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamBYOBReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'byobRequest', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.close.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.enqueue.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.error.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => new ReadableStreamBYOBRequest(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableByteStreamController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + assert.strictEqual( + inspect(readable), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: null }), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: 0 }), + 'ReadableStream [Object]'); + + assert.strictEqual( + inspect(controller), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + 
inspect(controller, { depth: null }), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + inspect(controller, { depth: 0 }), + 'ReadableStreamDefaultController {}'); + + const reader = readable.getReader(); + + assert.match( + inspect(reader), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: null }), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: 0 }), + /ReadableStreamDefaultReader/); + + assert.rejects(readableStreamPipeTo(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects(readableStreamPipeTo(new ReadableStream(), 1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects( + readableStreamPipeTo( + new ReadableStream(), + new WritableStream(), + false, + false, + false, + {}), + { + code: 'ERR_INVALID_ARG_TYPE', + }); +} + +{ + const readable = new ReadableStream(); + const reader = readable.getReader(); + reader.releaseLock(); + reader.releaseLock(); + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + // Test tee() cloneForBranch2 argument + const readable = new ReadableStream({ + start(controller) { + controller.enqueue('hello'); + } + }); + const [r1, r2] = readableStreamTee(readable, true); + r1.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); + r2.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); +} + +{ + assert.throws(() => { + readableByteStreamControllerConvertPullIntoDescriptor({ + bytesFilled: 10, + byteLength: 5 + }); + }, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + controller[kState].pendingPullIntos = [{}]; + assert.throws(() => readableByteStreamControllerRespond(controller, 0), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + readable.cancel().then(common.mustCall()); + + 
assert.throws(() => readableByteStreamControllerRespond(controller, 1), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert(!readableStreamDefaultControllerCanCloseOrEnqueue(controller)); + readableStreamDefaultControllerEnqueue(controller); + readableByteStreamControllerClose(controller); + readableByteStreamControllerEnqueue(controller); +} diff --git a/test/parallel/test-whatwg-transformstream.js b/test/parallel/test-whatwg-transformstream.js new file mode 100644 index 00000000000000..0cbc76cc4ce8c0 --- /dev/null +++ b/test/parallel/test-whatwg-transformstream.js @@ -0,0 +1,188 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + TransformStream, + TransformStreamDefaultController, +} = require('stream/web'); + +const { + createReadStream, + readFileSync, +} = require('fs'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +assert.throws(() => new TransformStream({ readableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); +assert.throws(() => new TransformStream({ writableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); + + +{ + const stream = new TransformStream(); + + async function test(stream) { + const writer = stream.writable.getWriter(); + const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'hello'); + } + + test(stream).then(common.mustCall()); +} + +class Transform { + start(controller) { + this.started = true; + } + + async transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + + async flush() { + this.flushed = true; + } +} + +{ + const transform = new Transform(); + const stream = new TransformStream(transform); + assert(transform.started); + + async function test(stream) { + const writer = stream.writable.getWriter(); 
+ const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'HELLO'); + + await writer.close(); + } + + test(stream).then(common.mustCall(() => { + assert(transform.flushed); + })); +} + +class Source { + constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk.toString()); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const instream = new ReadableStream(new Source()); + const tstream = new TransformStream(new Transform()); + const r = instream.pipeThrough(tstream); + + async function read(stream) { + let res = ''; + for await (const chunk of stream) + res += chunk; + return res; + } + + read(r).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.strictEqual(check.toString().toUpperCase(), data); + })); +} + +{ + assert.throws(() => Reflect.get(TransformStream.prototype, 'readable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => Reflect.get(TransformStream.prototype, 'writable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => TransformStream.prototype[kTransfer]({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(TransformStreamDefaultController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.enqueue({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.terminate({}); + }, { + code: 
'ERR_INVALID_THIS', + }); + + assert.throws(() => new TransformStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const transform = new TransformStream({ + start(c) { + controller = c; + } + }); + + assert.match(inspect(transform), /TransformStream/); + assert.match(inspect(transform, { depth: null }), /TransformStream/); + assert.match(inspect(transform, { depth: 0 }), /TransformStream \[/); + + assert.match(inspect(controller), /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /TransformStreamDefaultController \[/); +} diff --git a/test/parallel/test-whatwg-webstreams-coverage.js b/test/parallel/test-whatwg-webstreams-coverage.js new file mode 100644 index 00000000000000..f0036723b05977 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-coverage.js @@ -0,0 +1,70 @@ +// Flags: --no-warnings --expose-internals +'use strict'; + +require('../common'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('stream/web'); + +const { + inspect, +} = require('util'); + +const { + isPromisePending, +} = require('internal/webstreams/util'); + +const assert = require('assert'); + +assert(!isPromisePending({})); +assert(!isPromisePending(Promise.resolve())); +assert(isPromisePending(new Promise(() => {}))); + +// Brand checking works +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +// Custom Inspect Works + 
+{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + + assert.strictEqual( + inspect(strategy, { depth: null }), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy, { depth: 0 }), + 'CountQueuingStrategy [Object]'); + + assert.strictEqual( + inspect(new ByteLengthQueuingStrategy({ highWaterMark: 1 })), + 'ByteLengthQueuingStrategy { highWaterMark: 1 }'); +} diff --git a/test/parallel/test-whatwg-webstreams-transfer.js b/test/parallel/test-whatwg-webstreams-transfer.js new file mode 100644 index 00000000000000..2b7333d9c6fbf7 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-transfer.js @@ -0,0 +1,503 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); + +const { + ReadableStream, + WritableStream, + TransformStream, +} = require('stream/web'); + +const { + Worker +} = require('worker_threads'); + +const { + isReadableStream, +} = require('internal/webstreams/readablestream'); + +const { + isWritableStream, +} = require('internal/webstreams/writablestream'); + +const { + isTransformStream, +} = require('internal/webstreams/transformstream'); + +const { + makeTransferable, + kClone, + kTransfer, + kDeserialize, +} = require('internal/worker/js_transferable'); + +const assert = require('assert'); + +const theData = 'hello'; + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // This test takes the ReadableStream and transfers it to the + // port1 first, then again to port2, which reads the data. + // Internally, this sets up a pipelined data flow that is + // important to understand in case this test fails.. + // + // Specifically: + // + // 1. We start with ReadableStream R1, + // 2. 
Calling port2.postMessage causes a new internal WritableStream W1
+  //    and a new ReadableStream R2 to be created, both of which are coupled
+  //    to each other via a pair of MessagePorts P1 and P2.
+  // 3. ReadableStream R2 is passed to the port1.onmessage callback as the
+  //    data property of the MessageEvent, and R1 is configured to pipeTo W1.
+  // 4. Within port1.onmessage, we transfer ReadableStream R2 to port1, which
+  //    creates a new internal WritableStream W2 and a new ReadableStream R3,
+  //    both of which are coupled to each other via a pair of MessagePorts
+  //    P3 and P4.
+  // 5. ReadableStream R3 is passed to the port2.onmessage callback as the
+  //    data property of the MessageEvent, and R2 is configured to pipeTo W2.
+  // 6. Once the reader is attached to R3 in the port2.onmessage callback,
+  //    a message is sent along the path: R3 -> P4 -> P3 -> R2 -> P2 -> P1 -> R1
+  //    to begin pulling the data. The data is then pushed along the pipeline
+  //    R1 -> W1 -> P1 -> P2 -> R2 -> W2 -> P3 -> P4 -> R3
+  // 7. The MessagePorts P1, P2, P3, and P4 serve as a control channel for
+  //    passing data and control instructions, potentially across realms,
+  //    to the other ReadableStream and WritableStream instances.
+  //
+  // If this test experiences timeouts (hangs without finishing), it's most
+  // likely because the control instructions are somehow broken and the
+  // MessagePorts are not being closed properly or it could be caused by
+  // failing to close R1's controller which signals the end of the data
+  // flow. 
+
+  const readable = new ReadableStream({
+    start: common.mustCall((controller) => {
+      controller.enqueue(theData);
+      controller.close();
+    }),
+  });
+
+  port2.onmessage = common.mustCall(({ data }) => {
+    assert(isReadableStream(data));
+
+    const reader = data.getReader();
+    reader.read().then(common.mustCall((chunk) => {
+      assert.deepStrictEqual(chunk, { done: false, value: theData });
+    }));
+
+    port2.close();
+  });
+
+  port1.onmessage = common.mustCall(({ data }) => {
+    assert(isReadableStream(data));
+    assert(!data.locked);
+    port1.postMessage(data, [data]);
+    assert(data.locked);
+  });
+
+  assert.throws(() => port2.postMessage(readable), {
+    code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST',
+  });
+
+  port2.postMessage(readable, [readable]);
+  assert(readable.locked);
+}
+
+{
+  const { port1, port2 } = new MessageChannel();
+  port1.onmessageerror = common.mustNotCall();
+  port2.onmessageerror = common.mustNotCall();
+
+  // Like the ReadableStream test above, this sets up a pipeline
+  // through which the data flows...
+  //
+  // We start with WritableStream W1, which is transferred to port1.
+  // Doing so creates an internal ReadableStream R1 and WritableStream W2,
+  // which are coupled together with MessagePorts P1 and P2.
+  // The port1.onmessage callback receives WritableStream W2 and
+  // immediately transfers that to port2. Doing so creates an internal
+  // ReadableStream R2 and WritableStream W3, which are coupled together
+  // with MessagePorts P3 and P4. WritableStream W3 is handed off to
+  // port2.onmessage.
+
+  //
+  // When the writer on port2.onmessage writes the chunk of data, it
+  // gets passed along the pipeline:
+  // W3 -> P4 -> P3 -> R2 -> W2 -> P2 -> P1 -> R1 -> W1
+
+  const writable = new WritableStream({
+    write: common.mustCall((chunk) => {
+      assert.strictEqual(chunk, theData);
+    }),
+  });
+
+  port2.onmessage = common.mustCall(({ data }) => {
+    assert(isWritableStream(data));
+    assert(!data.locked);
+    const writer = data.getWriter();
+    writer.write(theData).then(common.mustCall());
+    writer.close();
+    port2.close();
+  });
+
+  port1.onmessage = common.mustCall(({ data }) => {
+    assert(isWritableStream(data));
+    assert(!data.locked);
+    port1.postMessage(data, [data]);
+    assert(data.locked);
+  });
+
+  assert.throws(() => port2.postMessage(writable), {
+    code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST',
+  });
+
+  port2.postMessage(writable, [writable]);
+  assert(writable.locked);
+}
+
+{
+  const { port1, port2 } = new MessageChannel();
+  port1.onmessageerror = common.mustNotCall();
+  port2.onmessageerror = common.mustNotCall();
+
+  // The data flow here is actually quite complicated, and is a combination
+  // of the WritableStream and ReadableStream examples above.
+  //
+  // We start with TransformStream T1, which creates ReadableStream R1,
+  // and WritableStream W1.
+  //
+  // When T1 is transferred to port1.onmessage, R1 and W1 are individually
+  // transferred.
+  //
+  // When R1 is transferred, it creates internal WritableStream W2, and
+  // new ReadableStream R2, coupled together via MessagePorts P1 and P2.
+  //
+  // When W1 is transferred, it creates internal ReadableStream R3 and
+  // new WritableStream W3, coupled together via MessagePorts P3 and P4.
+  //
+  // A new TransformStream T2 is created that owns ReadableStream R2 and
+  // WritableStream W3. The port1.onmessage callback immediately transfers
+  // that to port2.onmessage.
+  //
+  // When T2 is transferred, R2 and W3 are individually transferred.
+ // + // When R2 is transfered, it creates internal WritableStream W4, and + // ReadableStream R4, coupled together via MessagePorts P5 and P6. + // + // When W3 is transfered, it creates internal ReadableStream R5, and + // WritableStream W5, coupled together via MessagePorts P7 and P8. + // + // A new TransformStream T3 is created that owns ReadableStream R4 and + // WritableStream W5. + // + // port1.onmessage then writes a chunk of data. That chunk of data + // flows through the pipeline to T1: + // + // W5 -> P8 -> P7 -> R5 -> W3 -> P4 -> P3 -> R3 -> W1 -> T1 + // + // T1 performs the transformation, then pushes the chunk back out + // along the pipeline: + // + // T1 -> R1 -> W2 -> P1 -> P2 -> R2 -> W4 -> P5 -> P6 -> R4 + + const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + const writer = data.writable.getWriter(); + const reader = data.readable.getReader(); + Promise.all([ + writer.write(theData), + writer.close(), + reader.read().then(common.mustCall((result) => { + assert(!result.done); + assert.strictEqual(result.value, theData.toUpperCase()); + })), + reader.read().then(common.mustCall((result) => { + assert(result.done); + })), + ]).then(common.mustCall()); + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + assert(!data.readable.locked); + assert(!data.writable.locked); + port1.postMessage(data, [data]); + assert(data.readable.locked); + assert(data.writable.locked); + }); + + assert.throws(() => port2.postMessage(transform), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(transform, [transform]); + assert(transform.readable.locked); + assert(transform.writable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + let controller; + + const readable = new ReadableStream({ + 
start(c) { controller = c; }, + + cancel: common.mustCall((error) => { + assert.strictEqual(error.code, 25); // DataCloneError + }), + }); + + port1.onmessage = ({ data }) => { + const reader = data.getReader(); + assert.rejects(reader.read(), { + code: 25, // DataCloneError + }); + port1.close(); + }; + + port2.postMessage(readable, [readable]); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + controller.enqueue(notActuallyTransferable); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + assert.strictEqual(error.name, 'DataCloneError'); + }); + }) + }; + + const writable = new WritableStream(source); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, { + code: 25, + name: 'DataCloneError', + }); + + writer.write(notActuallyTransferable).then(common.mustCall()); + + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const error = new Error('boom'); + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((reason) => { + process.nextTick(() => { + assert.deepStrictEqual(reason, error); + + // Reason is a clone of the original error. 
+ assert.notStrictEqual(reason, error); + }); + }), + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => assert.strictEqual(error.code, 25)); + }) + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + const m = new WebAssembly.Memory({ initial: 1 }); + + assert.rejects(writer.abort(m), { + code: 25 + }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + // Verify that the communication works across worker threads... + + const worker = new Worker(` + const { + isReadableStream, + } = require('internal/webstreams/readablestream'); + + const { + parentPort, + } = require('worker_threads'); + + const assert = require('assert'); + + const tracker = new assert.CallTracker(); + process.on('exit', () => { + tracker.verify(); + }); + + parentPort.onmessage = tracker.calls(({ data }) => { + assert(isReadableStream(data)); + const reader = data.getReader(); + reader.read().then(tracker.calls((result) => { + assert(!result.done); + assert(result.value instanceof Uint8Array); + })); + parentPort.close(); + }); + parentPort.onmessageerror = () => assert.fail('should not be called'); + `, { eval: true }); + + worker.on('error', common.mustNotCall()); + + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array(10)); + controller.close(); + } + }); + + worker.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall(), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new 
MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + data.cancel().then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall((error) => { + process.nextTick(() => assert(error.code, 25)); + }), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + + const reader = data.getReader(); + + const cancel = reader.cancel(m); + + reader.closed.then(common.mustCall()); + + assert.rejects(cancel, { + code: 25 + }); + + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + }); + }), + }; + + const writable = new WritableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + const writer = data.getWriter(); + const write = writer.write(m); + assert.rejects(write, { code: 25 }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const readable = new ReadableStream(); + readable.getReader(); + assert.throws(() => readable[kTransfer](), { + code: 25 + }); + + const writable = new WritableStream(); + writable.getWriter(); + assert.throws(() => writable[kTransfer](), { + code: 25 + }); +} diff --git a/test/parallel/test-whatwg-writablestream.js b/test/parallel/test-whatwg-writablestream.js new file mode 100644 index 00000000000000..91e3c098462949 --- /dev/null +++ b/test/parallel/test-whatwg-writablestream.js @@ -0,0 +1,260 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + WritableStream, + WritableStreamDefaultController, + 
WritableStreamDefaultWriter, + CountQueuingStrategy, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + isPromise, +} = require('util/types'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +class Sink { + constructor() { + this.chunks = []; + } + + start() { + this.started = true; + } + + write(chunk) { + this.chunks.push(chunk); + } + + close() { + this.closed = true; + } + + abort() { + this.aborted = true; + } +} + +{ + const stream = new WritableStream(); + + assert(stream[kState].controller instanceof WritableStreamDefaultController); + assert(!stream.locked); + + assert.strictEqual(typeof stream.abort, 'function'); + assert.strictEqual(typeof stream.close, 'function'); + assert.strictEqual(typeof stream.getWriter, 'function'); +} + +[1, false, ''].forEach((type) => { + assert.throws(() => new WritableStream({ type }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + assert.throws(() => new WritableStream({}, { highWaterMark }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', false, {}].forEach((size) => { + assert.throws(() => new WritableStream({}, { size }), { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +{ + new WritableStream({}, 1); + new WritableStream({}, 'a'); + new WritableStream({}, null); +} + +{ + const sink = new Sink(); + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + assert(!stream.locked); + const writer = stream.getWriter(); + assert(stream.locked); + assert(writer instanceof WritableStreamDefaultWriter); + + assert(isPromise(writer.closed)); + assert(isPromise(writer.ready)); + assert(typeof writer.desiredSize, 'number'); + assert(typeof writer.abort, 'function'); + assert(typeof writer.close, 'function'); + assert(typeof writer.releaseLock, 'function'); + assert(typeof writer.write, 'function'); + + 
writer.releaseLock(); + assert(!stream.locked); + + const writer2 = stream.getWriter(); + + assert(sink.started); + + writer2.closed.then(common.mustCall()); + writer2.ready.then(common.mustCall()); + + writer2.close().then(common.mustCall(() => { + assert.strict(sink.closed); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + const error = new Error('boom'); + + const writer = stream.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'errored'); + assert(sink.aborted); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, { highWaterMark: 1 } + ); + + async function write(stream) { + const writer = stream.getWriter(); + const p = writer.write('hello'); + assert.strictEqual(writer.desiredSize, 0); + await p; + assert.strictEqual(writer.desiredSize, 1); + } + + write(stream).then(common.mustCall(() => { + assert.deepStrictEqual(['hello'], sink.chunks); + })); +} + +{ + assert.throws(() => Reflect.get(WritableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype.getWriter.call(), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype[kTransfer].call(), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'closed'), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'ready'), { + code: 'ERR_INVALID_THIS', + }); + assert.throws( + () => Reflect.get(WritableStreamDefaultWriter.prototype, 'desiredSize'), { + code: 'ERR_INVALID_THIS', + }); + 
assert.rejects(WritableStreamDefaultWriter.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.write({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStreamDefaultWriter.prototype.releaseLock({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'abortReason', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'signal', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + WritableStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + let controller; + const writable = new WritableStream({ + start(c) { controller = c; } + }); + assert.strictEqual( + inspect(writable), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: null }), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: 0 }), + 'WritableStream [Object]'); + + const writer = writable.getWriter(); + assert.match( + inspect(writer), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: null }), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: 0 }), + /WritableStreamDefaultWriter \[/); + + assert.match( + inspect(controller), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /WritableStreamDefaultController \[/); + + writer.abort(new Error('boom')); + + assert.strictEqual(writer.desiredSize, null); + setImmediate(() => assert.strictEqual(writer.desiredSize, null)); +} diff --git a/test/wpt/status/streams.json 
b/test/wpt/status/streams.json index 0967ef424bce67..c1b80d69dd8cd3 100644 --- a/test/wpt/status/streams.json +++ b/test/wpt/status/streams.json @@ -1 +1,11 @@ -{} +{ + "queuing-strategies-size-function-per-global.window.js": { + "skip": "Browser-specific test" + }, + "transferable/deserialize-error.window.js": { + "skip": "Browser-specific test" + }, + "readable-byte-streams/bad-buffers-and-views.any.js": { + "fail": "TODO: implement detached ArrayBuffer support" + } +} diff --git a/test/wpt/test-streams.js b/test/wpt/test-streams.js index 6a64f241c10e2d..987676d8c49125 100644 --- a/test/wpt/test-streams.js +++ b/test/wpt/test-streams.js @@ -10,7 +10,7 @@ runner.setFlags(['--expose-internals']); // Set a script that will be executed in the worker before running the tests. runner.setInitScript(` - const { + let { ReadableStream, ReadableStreamDefaultReader, ReadableStreamBYOBReader, @@ -29,19 +29,111 @@ runner.setInitScript(` const { internalBinding } = require('internal/test/binding'); const { DOMException } = internalBinding('messaging'); global.DOMException = DOMException; - global.ReadableStream = ReadableStream; - global.ReadableStreamDefaultReader = ReadableStreamDefaultReader; - global.ReadableStreamBYOBReader = ReadableStreamBYOBReader; - global.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest; - global.ReadableByteStreamController = ReadableByteStreamController; - global.ReadableStreamDefaultController = ReadableStreamDefaultController; - global.TransformStream = TransformStream; - global.TransformStreamDefaultController = TransformStreamDefaultController; - global.WritableStream = WritableStream; - global.WritableStreamDefaultWriter = WritableStreamDefaultWriter; - global.WritableStreamDefaultController = WritableStreamDefaultController; - global.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy; - global.CountQueuingStrategy = CountQueuingStrategy; + + Object.defineProperties(global, { + ReadableStream: { + value: ReadableStream, + configurable: 
true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultReader: { + value: ReadableStreamDefaultReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBReader: { + value: ReadableStreamBYOBReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBRequest: { + value: ReadableStreamBYOBRequest, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableByteStreamController: { + value: ReadableByteStreamController, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultController: { + value: ReadableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStream: { + value: TransformStream, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStreamDefaultController: { + value: TransformStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStream: { + value: WritableStream, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultWriter: { + value: WritableStreamDefaultWriter, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultController: { + value: WritableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + ByteLengthQueuingStrategy: { + value: ByteLengthQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + CountQueuingStrategy: { + value: CountQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + }); + + // Simulate global postMessage for enqueue-with-detached-buffer.window.js + function postMessage(value, origin, transferList) { + const mc = new MessageChannel(); + mc.port1.postMessage(value, transferList); + mc.port2.close(); + } + + // TODO(@jasnell): This is a bit of a hack to get the idl harness test + // working. 
Later we should investigate a better approach. + // See: https://github.com/nodejs/node/pull/39062#discussion_r659383373 + Object.defineProperties(global, { + DedicatedWorkerGlobalScope: { + get() { + // Pretend that we're a DedicatedWorker, but *only* for the + // IDL harness. For everything else, keep the JavaScript shell + // environment. + if (new Error().stack.includes('idlharness.js')) + return global.constructor; + else + return function() {}; + } + } + }); `); runner.runJsTests(); diff --git a/tools/doc/type-parser.mjs b/tools/doc/type-parser.mjs index c2586a43254ecb..e3b8ad0ffac4bf 100644 --- a/tools/doc/type-parser.mjs +++ b/tools/doc/type-parser.mjs @@ -226,6 +226,33 @@ const customTypesMap = { 'X509Certificate': 'crypto.html#crypto_class_x509certificate', 'zlib options': 'zlib.html#zlib_class_options', + + 'ReadableStream': + 'webstreams.md#webstreamsapi_class_readablestream', + 'ReadableStreamDefaultReader': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultreader', + 'ReadableStreamBYOBReader': + 'webstreams.md#webstreamsapi_class_readablestreambyobreader', + 'ReadableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultcontroller', + 'ReadableByteStreamController': + 'webstreams.md#webstreamsapi_class_readablebytestreamcontroller', + 'ReadableStreamBYOBRequest': + 'webstreams.md#webstreamsapi_class_readablestreambyobrequest', + 'WritableStream': + 'webstreams.md#webstreamsapi_class_writablestream', + 'WritableStreamDefaultWriter': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultwriter', + 'WritableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultcontroller', + 'TransformStream': + 'webstreams.md#webstreamsapi_class_transformstream', + 'TransformStreamDefaultController': + 'webstreams.md#webstreamsapi_class_transformstreamdefaultcontroller', + 'ByteLengthQueuingStrategy': + 'webstreams.md#webstreamsapi_class_bytelengthqueuingstrategy', + 'CountQueuingStrategy': + 
'webstreams.md#webstreamsapi_class_countqueuingstrategy', }; const arrayPart = /(?:\[])+$/; diff --git a/typings/primordials.d.ts b/typings/primordials.d.ts index 0436e92b1d9b53..beed1d7b83c4c9 100644 --- a/typings/primordials.d.ts +++ b/typings/primordials.d.ts @@ -1,3 +1,5 @@ +import { AsyncIterator } from "internal/webstreams/util"; + type UncurryThis unknown> = (self: ThisParameterType, ...args: Parameters) => ReturnType; type UncurryThisStaticApply unknown> = @@ -9,15 +11,15 @@ type StaticApply unknown> = * Primordials are a way to safely use globals without fear of global mutation * Generally, this means removing `this` parameter usage and instead using * a regular parameter: - * + * * @example - * + * * ```js * 'thing'.startsWith('hello'); * ``` - * + * * becomes - * + * * ```js * primordials.StringPrototypeStartsWith('thing', 'hello') * ``` @@ -142,6 +144,7 @@ declare namespace primordials { export const ArrayBufferPrototype: typeof ArrayBuffer.prototype export const ArrayBufferIsView: typeof ArrayBuffer.isView export const ArrayBufferPrototypeSlice: UncurryThis + export const AsyncIteratorPrototype: UncurryThis export const BigInt: typeof globalThis.BigInt; export const BigIntLength: typeof BigInt.length export const BigIntName: typeof BigInt.name @@ -522,5 +525,5 @@ declare namespace primordials { export const PromiseAny: typeof Promise.any export const PromisePrototypeThen: UncurryThis export const PromisePrototypeCatch: UncurryThis - export const PromisePrototypeFinally: UncurryThis + export const PromisePrototypeFinally: UncurryThis }