diff --git a/.github/workflows/client.yml b/.github/workflows/upload-client.yml similarity index 89% rename from .github/workflows/client.yml rename to .github/workflows/upload-client.yml index 953d5c8f8..82913e0ca 100644 --- a/.github/workflows/client.yml +++ b/.github/workflows/upload-client.yml @@ -1,4 +1,4 @@ -name: Client +name: Upload Client env: CI: true FORCE_COLOR: 1 @@ -28,5 +28,6 @@ jobs: node-version: 18 cache: 'pnpm' - run: pnpm install + - run: pnpm -r --filter @web3-storage/upload-client run build - run: pnpm -r --filter @web3-storage/upload-client run lint - run: pnpm -r --filter @web3-storage/upload-client run test diff --git a/packages/upload-client/README.md b/packages/upload-client/README.md new file mode 100644 index 000000000..945bcb0f0 --- /dev/null +++ b/packages/upload-client/README.md @@ -0,0 +1,394 @@ +


web3.storage

+

The upload client for https://web3.storage

+ +## Install + +Install the package using npm: + +```console +npm install @web3-storage/upload-client +``` + +## Usage + +[API Reference](#api) + +### Create an Agent + +An Agent provides an `issuer` (a key linked to your account) and `proofs` to show your `issuer` has been delegated the capabilities to store data and register uploads. + +```js +import { Agent } from '@web3-storage/access-client' +import { add as storeAdd } from '@web3-storage/access-client/capabilities/store' +import { add as uploadAdd } from '@web3-storage/access-client/capabilities/upload' + +const agent = await Agent.create({ store }) + +// Note: you need to create and register an account 1st time: +// await agent.createAccount('you@youremail.com') + +const conf = { + issuer: agent.issuer, + proofs: agent.getProofs([storeAdd, uploadAdd]), +} +``` + +### Uploading files + +Once you have the `issuer` and `proofs`, you can upload a directory of files by passing that invocation config to `uploadDirectory` along with your list of files to upload. + +You can get your list of Files from a [``](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file) element in the browser or using [`files-from-path`](https://npm.im/files-from-path) in Node.js + +```js +import { uploadFile } from '@web3-storage/upload-client' + +const cid = await uploadFile(conf, new Blob(['Hello World!'])) +``` + +```js +import { uploadDirectory } from '@web3-storage/upload-client' + +const cid = await uploadDirectory(conf, [ + new File(['doc0'], 'doc0.txt'), + new File(['doc1'], 'dir/doc1.txt'), +]) +``` + +### Advanced usage + +#### Buffering API + +The buffering API loads all data into memory so is suitable only for small files. The root data CID is derived from the data before any transfer to the service takes place. 
+ +```js +import { UnixFS, CAR, Store, Upload } from '@web3-storage/upload-client' + +// Encode a file as a DAG, get back a root data CID and a set of blocks +const { cid, blocks } = await UnixFS.encodeFile(file) +// Encode the DAG as a CAR file +const car = await CAR.encode(blocks, cid) +// Store the CAR file to the service +const carCID = await Store.add(conf, car) +// Register an "upload" - a root CID contained within the passed CAR file(s) +await Upload.add(conf, cid, [carCID]) +``` + +#### Streaming API + +This API offers streaming DAG generation, allowing CAR "shards" to be sent to the service as the DAG is built. It allows files and directories of arbitrary size to be sent to the service while keeping within memory limits of the device. The _last_ CAR file sent contains the root data CID. + +```js +import { + UnixFS, + ShardingStream, + ShardStoringStream, + Upload, +} from '@web3-storage/upload-client' + +const metadatas = [] +// Encode a file as a DAG, get back a readable stream of blocks. +await UnixFS.createFileEncoderStream(file) + // Pipe blocks to a stream that yields CARs files - shards of the DAG. + .pipeThrough(new ShardingStream()) + // Pipe CARs to a stream that stores them to the service and yields metadata + // about the CARs that were stored. + .pipeThrough(new ShardStoringStream(conf)) + // Collect the metadata, we're mostly interested in the CID of each CAR file + // and the root data CID (which can be found in the _last_ CAR file). 
+ .pipeTo( + new WritableStream({ + write: (meta) => { + metadatas.push(meta) + }, + }) + ) + +// The last CAR stored contains the root data CID +const rootCID = metadatas.at(-1).roots[0] +const carCIDs = metadatas.map((meta) => meta.cid) + +// Register an "upload" - a root CID contained within the passed CAR file(s) +await Upload.add(conf, rootCID, carCIDs) +``` + +## API + +- [`uploadDirectory`](#uploaddirectory) +- [`uploadFile`](#uploadfile) +- `CAR` + - [`encode`](#carencode) +- [`ShardingStream`](#shardingstream) +- [`ShardStoringStream`](#shardstoringstream) +- `Store` + - [`add`](#storeadd) + - [`list`](#storelist) + - [`remove`](#storeremove) +- `UnixFS` + - [`createDirectoryEncoderStream`](#unixfscreatedirectoryencoderstream) + - [`createFileEncoderStream`](#unixfscreatefileencoderstream) + - [`encodeDirectory`](#unixfsencodedirectory) + - [`encodeFile`](#unixfsencodefile) +- `Upload` + - [`add`](#uploadadd) + - [`list`](#uploadlist) + - [`remove`](#uploadremove) + +--- + +### `uploadDirectory` + +```ts +function uploadDirectory( + conf: InvocationConfig, + files: File[], + options: { + retries?: number + signal?: AbortSignal + onShardStored: ShardStoredCallback + } = {} +): Promise +``` + +Uploads a directory of files to the service and returns the root data CID for the generated DAG. All files are added to a container directory, with paths in file names preserved. + +Required delegated capability proofs: `store/add`, `upload/add` + +More information: [`InvocationConfig`](#invocationconfig) + +### `uploadFile` + +```ts +function uploadFile( + conf: InvocationConfig, + file: Blob, + options: { + retries?: number + signal?: AbortSignal + onShardStored: ShardStoredCallback + } = {} +): Promise +``` + +Uploads a file to the service and returns the root data CID for the generated DAG. 
+ +Required delegated capability proofs: `store/add`, `upload/add` + +More information: [`InvocationConfig`](#invocationconfig) + +### `CAR.encode` + +```ts +function encode(blocks: Iterable, root?: CID): Promise +``` + +Encode a DAG as a CAR file. + +More information: [`CARFile`](#carfile) + +Example: + +```js +const { cid, blocks } = await UnixFS.encodeFile(new Blob(['data'])) +const car = await CAR.encode(blocks, cid) +``` + +### `ShardingStream` + +```ts +class ShardingStream extends TransformStream +``` + +Shard a set of blocks into a set of CAR files. The last block written to the stream is assumed to be the DAG root and becomes the CAR root CID for the last CAR output. + +More information: [`CARFile`](#carfile) + +### `ShardStoringStream` + +```ts +class ShardStoringStream extends TransformStream +``` + +Stores multiple DAG shards (encoded as CAR files) to the service. + +Note: an "upload" must be registered in order to link multiple shards together as a complete upload. + +The writeable side of this transform stream accepts `CARFile`s and the readable side yields `CARMetadata`, which contains the CAR CID, its size (in bytes) and its roots (if it has any). + +### `Store.add` + +```ts +function add( + conf: InvocationConfig, + car: Blob, + options: { retries?: number; signal?: AbortSignal } = {} +): Promise +``` + +Store a CAR file to the service. + +Required delegated capability proofs: `store/add` + +More information: [`InvocationConfig`](#invocationconfig) + +### `Store.list` + +```ts +function list( + conf: InvocationConfig, + options: { retries?: number; signal?: AbortSignal } = {} +): Promise> +``` + +List CAR files stored by the issuer. + +Required delegated capability proofs: `store/list` + +More information: [`InvocationConfig`](#invocationconfig) + +### `Store.remove` + +```ts +function remove( + conf: InvocationConfig, + link: CID, + options: { retries?: number; signal?: AbortSignal } = {} +): Promise +``` + +Remove a stored CAR file by CAR CID. 
+ +Required delegated capability proofs: `store/remove` + +More information: [`InvocationConfig`](#invocationconfig) + +### `UnixFS.createDirectoryEncoderStream` + +```ts +function createDirectoryEncoderStream( + files: Iterable +): ReadableStream +``` + +Creates a `ReadableStream` that yields UnixFS DAG blocks. All files are added to a container directory, with paths in file names preserved. + +Note: you can use https://npm.im/files-from-path to read files from the filesystem in Node.js. + +### `UnixFS.createFileEncoderStream` + +```ts +function createFileEncoderStream(file: Blob): ReadableStream +``` + +Creates a `ReadableStream` that yields UnixFS DAG blocks. + +### `UnixFS.encodeDirectory` + +```ts +function encodeDirectory( + files: Iterable +): Promise<{ cid: CID; blocks: Block[] }> +``` + +Create a UnixFS DAG from the passed file data. All files are added to a container directory, with paths in file names preserved. + +Note: you can use https://npm.im/files-from-path to read files from the filesystem in Node.js. + +Example: + +```js +const { cid, blocks } = encodeDirectory([ + new File(['doc0'], 'doc0.txt'), + new File(['doc1'], 'dir/doc1.txt'), +]) +// DAG structure will be: +// bafybei.../doc0.txt +// bafybei.../dir/doc1.txt +``` + +### `UnixFS.encodeFile` + +```ts +function encodeFile(file: Blob): Promise<{ cid: CID; blocks: Block[] }> +``` + +Create a UnixFS DAG from the passed file data. + +Example: + +```js +const { cid, blocks } = await encodeFile(new File(['data'], 'doc.txt')) +// Note: file name is not preserved - use encodeDirectory if required. +``` + +### `Upload.add` + +```ts +function add( + conf: InvocationConfig, + root: CID, + shards: CID[], + options: { retries?: number; signal?: AbortSignal } = {} +): Promise +``` + +Register a set of stored CAR files as an "upload" in the system. A DAG can be split between multiple CAR files. Calling this function allows multiple stored CAR files to be considered as a single upload. 
+ +Required delegated capability proofs: `upload/add` + +More information: [`InvocationConfig`](#invocationconfig) + +### `Upload.list` + +```ts +function list( + conf: InvocationConfig, + options: { retries?: number; signal?: AbortSignal } = {} +): Promise> +``` + +List uploads created by the issuer. + +Required delegated capability proofs: `upload/list` + +More information: [`InvocationConfig`](#invocationconfig) + +### `Upload.remove` + +```ts +function remove( + conf: InvocationConfig, + link: CID, + options: { retries?: number; signal?: AbortSignal } = {} +): Promise +``` + +Remove an upload by root data CID. + +Required delegated capability proofs: `upload/remove` + +More information: [`InvocationConfig`](#invocationconfig) + +## Types + +### `CARFile` + +A `Blob` with two extra properties: + +```ts +type CARFile = Blob & { version: 1; roots: CID[] } +``` + +### `InvocationConfig` + +This is the configuration for the UCAN invocation. Its values can be obtained from an `Agent`. See [Create an Agent](#create-an-agent) for an example. It is an object with `issuer` and `proofs`: + +- The `issuer` is the signing authority that is issuing the UCAN invocation(s). It is typically the user _agent_. +- The `proofs` are a set of capability delegations that prove the issuer has the capability to perform the action. + +## Contributing + +Feel free to join in. All welcome. Please [open an issue](https://github.com/web3-storage/w3protocol/issues)! 
+ +## License + +Dual-licensed under [MIT + Apache 2.0](https://github.com/web3-storage/w3protocol/blob/main/license.md) diff --git a/packages/upload-client/package.json b/packages/upload-client/package.json index de4b4af80..a90881f1c 100644 --- a/packages/upload-client/package.json +++ b/packages/upload-client/package.json @@ -1,7 +1,7 @@ { "name": "@web3-storage/upload-client", "version": "0.0.0", - "description": "The web3.storage client", + "description": "The web3.storage upload client", "homepage": "https://github.com/web3-storage/w3protocol/tree/main/packages/upload-client", "repository": { "type": "git", @@ -14,20 +14,44 @@ "types": "dist/src/index.d.ts", "main": "src/index.js", "scripts": { - "lint": "tsc && eslint '**/*.{js,ts}' && prettier --check '**/*.{js,ts,yml,json}' --ignore-path ../../.gitignore", - "build": "tsc --build", - "test": "npm run test:node && npm run test:browser", - "test:node": "mocha 'test/**/!(*.browser).test.js' -n experimental-vm-modules -n no-warnings", + "lint": "eslint '**/*.{js,ts}' && prettier --check '**/*.{js,ts,yml,json}' --ignore-path ../../.gitignore", + "build": "run-s build:*", + "build:deps": "pnpm -r --filter @web3-storage/access run build", + "build:tsc": "tsc --build", + "test": "npm-run-all -p -r mock test:all", + "test:all": "run-s test:browser test:node", + "test:node": "hundreds -r html -r text mocha 'test/**/!(*.browser).test.js' -n experimental-vm-modules -n no-warnings", "test:browser": "playwright-test 'test/**/!(*.node).test.js'", + "mock": "run-p mock:*", + "mock:bucket-200": "PORT=9200 STATUS=200 node test/helpers/bucket-server.js", + "mock:bucket-401": "PORT=9400 STATUS=400 node test/helpers/bucket-server.js", + "mock:bucket-500": "PORT=9500 STATUS=500 node test/helpers/bucket-server.js", "rc": "npm version prerelease --preid rc" }, "exports": { - ".": "./src/index.js" + ".": "./src/index.js", + "./car": "./src/car.js", + "./sharding": "./src/sharding.js", + "./upload": "./src/upload.js", + "./store": 
"./src/store.js", + "./unixfs": "./src/unixfs.js" }, "typesVersions": { "*": { - "*": [ - "dist/*" + "car": [ + "dist/src/car.d.ts" + ], + "sharding": [ + "dist/src/sharding.d.ts" + ], + "upload": [ + "dist/src/upload.d.ts" + ], + "store": [ + "dist/src/store.d.ts" + ], + "unixfs": [ + "dist/src/unixfs.d.ts" ] } }, @@ -36,12 +60,34 @@ "dist/src/**/*.d.ts", "dist/src/**/*.d.ts.map" ], + "dependencies": { + "@ipld/car": "^5.0.0", + "@ipld/dag-ucan": "^2.0.1", + "@ipld/unixfs": "^2.0.0", + "@ucanto/client": "^3.0.1", + "@ucanto/core": "^3.0.1", + "@ucanto/interface": "^3.0.0", + "@ucanto/transport": "^3.0.1", + "@web3-storage/access": "workspace:^", + "multiformats": "^10.0.2", + "p-queue": "^7.3.0", + "p-retry": "^5.1.1" + }, "devDependencies": { "@types/assert": "^1.5.6", "@types/mocha": "^10.0.0", + "@ucanto/principal": "^3.0.0", + "@ucanto/server": "^3.0.1", + "@ucanto/validator": "^3.0.1", "assert": "^2.0.0", + "blockstore-core": "^2.0.2", + "c8": "^7.12.0", "hd-scripts": "^3.0.2", + "hundreds": "^0.0.9", + "ipfs-unixfs-exporter": "^9.0.1", "mocha": "^10.1.0", + "npm-run-all": "^4.1.5", + "path": "^0.12.7", "playwright-test": "^8.1.1", "typescript": "^4.8.4" }, @@ -53,13 +99,27 @@ "project": "./tsconfig.json" }, "rules": { - "unicorn/prefer-number-properties": "off" + "unicorn/prefer-number-properties": "off", + "unicorn/no-null": "off", + "unicorn/prefer-set-has": "off", + "unicorn/no-array-for-each": "off", + "unicorn/prefer-export-from": "off", + "unicorn/catch-error-name": "off", + "unicorn/explicit-length-check": "off", + "unicorn/prefer-type-error": "off", + "eqeqeq": "off", + "no-void": "off", + "no-console": "off", + "no-continue": "off", + "jsdoc/check-indentation": "off", + "jsdoc/require-hyphen-before-param-description": "off" }, "env": { "mocha": true }, "ignorePatterns": [ - "dist" + "dist", + "coverage" ] } } diff --git a/packages/upload-client/src/car.js b/packages/upload-client/src/car.js new file mode 100644 index 000000000..f1042c303 --- 
/dev/null +++ b/packages/upload-client/src/car.js @@ -0,0 +1,31 @@ +import { CarWriter } from '@ipld/car' + +/** + * @param {Iterable|AsyncIterable} blocks + * @param {import('./types').AnyLink} [root] + * @returns {Promise} + */ +export async function encode(blocks, root) { + // @ts-expect-error + const { writer, out } = CarWriter.create(root) + /** @type {Error?} */ + let error + void (async () => { + try { + for await (const block of blocks) { + // @ts-expect-error + await writer.put(block) + } + } catch (/** @type {any} */ err) { + error = err + } finally { + await writer.close() + } + })() + const chunks = [] + for await (const chunk of out) chunks.push(chunk) + // @ts-expect-error + if (error != null) throw error + const roots = root != null ? [root] : [] + return Object.assign(new Blob(chunks), { version: 1, roots }) +} diff --git a/packages/upload-client/src/constants.js b/packages/upload-client/src/constants.js new file mode 100644 index 000000000..a98ccf42a --- /dev/null +++ b/packages/upload-client/src/constants.js @@ -0,0 +1 @@ +export const REQUEST_RETRIES = 3 diff --git a/packages/upload-client/src/index.js b/packages/upload-client/src/index.js index 8901d0373..eeeb6778a 100644 --- a/packages/upload-client/src/index.js +++ b/packages/upload-client/src/index.js @@ -1,4 +1,99 @@ -class Client {} +import * as Storage from './store.js' +import * as Upload from './upload.js' +import * as UnixFS from './unixfs.js' +import * as CAR from './car.js' +import { ShardingStream, ShardStoringStream } from './sharding.js' -export default Client -export { Client } +export { Storage, Upload, UnixFS, CAR } +export * from './sharding.js' + +/** + * @typedef {(meta: import('./types').CARMetadata) => void} StoredShardCallback + * @typedef {import('./types').RequestOptions & { onStoredShard?: StoredShardCallback }} UploadOptions + */ + +/** + * Uploads a file to the service and returns the root data CID for the + * generated DAG. 
+ * + * Required delegated capability proofs: `store/add`, `upload/add` + * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `store/add` and `upload/add` delegated capability. + * @param {Blob} file File data. + * @param {UploadOptions} [options] + */ +export async function uploadFile({ issuer, proofs }, file, options = {}) { + return await uploadBlockStream( + { issuer, proofs }, + UnixFS.createFileEncoderStream(file), + options + ) +} + +/** + * Uploads a directory of files to the service and returns the root data CID + * for the generated DAG. All files are added to a container directory, with + * paths in file names preserved. + * + * Required delegated capability proofs: `store/add`, `upload/add` + * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `store/add` and `upload/add` delegated capability. + * @param {import('./types').FileLike[]} files File data. 
+ * @param {UploadOptions} [options] + */ +export async function uploadDirectory({ issuer, proofs }, files, options = {}) { + return await uploadBlockStream( + { issuer, proofs }, + UnixFS.createDirectoryEncoderStream(files), + options + ) +} + +/** + * @param {import('./types').InvocationConfig} invocationConfig + * @param {ReadableStream} blocks + * @param {UploadOptions} [options] + * @returns {Promise} + */ +async function uploadBlockStream({ issuer, proofs }, blocks, options = {}) { + /** @type {import('./types').CARLink[]} */ + const shards = [] + /** @type {import('./types').AnyLink?} */ + let root = null + await blocks + .pipeThrough(new ShardingStream()) + .pipeThrough(new ShardStoringStream({ issuer, proofs }, options)) + .pipeTo( + new WritableStream({ + write(meta) { + root = root || meta.roots[0] + shards.push(meta.cid) + if (options.onStoredShard) options.onStoredShard(meta) + }, + }) + ) + + /* c8 ignore next */ + if (!root) throw new Error('missing root CID') + + await Upload.add({ issuer, proofs }, root, shards, options) + return root +} diff --git a/packages/upload-client/src/service.js b/packages/upload-client/src/service.js new file mode 100644 index 000000000..282ae6644 --- /dev/null +++ b/packages/upload-client/src/service.js @@ -0,0 +1,21 @@ +import { connect } from '@ucanto/client' +import { CAR, CBOR, HTTP } from '@ucanto/transport' +import * as DID from '@ipld/dag-ucan/did' + +export const serviceURL = new URL( + 'https://8609r1772a.execute-api.us-east-1.amazonaws.com' +) +export const serviceDID = DID.parse( + 'did:key:z6MkrZ1r5XBFZjBU34qyD8fueMbMRkKw17BZaq2ivKFjnz2z' +) + +/** @type {import('@ucanto/interface').ConnectionView} */ +export const connection = connect({ + id: serviceDID, + encoder: CAR, + decoder: CBOR, + channel: HTTP.open({ + url: serviceURL, + method: 'POST', + }), +}) diff --git a/packages/upload-client/src/sharding.js b/packages/upload-client/src/sharding.js new file mode 100644 index 000000000..155fc41c9 --- /dev/null 
+++ b/packages/upload-client/src/sharding.js @@ -0,0 +1,111 @@ +import Queue from 'p-queue' +import { encode } from './car.js' +import { add } from './store.js' + +const SHARD_SIZE = 1024 * 1024 * 100 +const CONCURRENT_UPLOADS = 3 + +/** + * Shard a set of blocks into a set of CAR files. The last block is assumed to + * be the DAG root and becomes the CAR root CID for the last CAR output. + * + * @extends {TransformStream} + */ +export class ShardingStream extends TransformStream { + /** + * @param {object} [options] + * @param {number} [options.shardSize] The target shard size. Actual size of + * CAR output may be bigger due to CAR header and block encoding data. + */ + constructor(options = {}) { + const shardSize = options.shardSize ?? SHARD_SIZE + /** @type {import('@ipld/unixfs').Block[]} */ + let shard = [] + /** @type {import('@ipld/unixfs').Block[] | null} */ + let readyShard = null + let size = 0 + + super({ + async transform(block, controller) { + if (readyShard != null) { + controller.enqueue(await encode(readyShard)) + readyShard = null + } + if (shard.length && size + block.bytes.length > shardSize) { + readyShard = shard + shard = [] + size = 0 + } + shard.push(block) + size += block.bytes.length + }, + + async flush(controller) { + if (readyShard != null) { + controller.enqueue(await encode(readyShard)) + } + + const rootBlock = shard.at(-1) + if (rootBlock != null) { + controller.enqueue(await encode(shard, rootBlock.cid)) + } + }, + }) + } +} + +/** + * Upload multiple DAG shards (encoded as CAR files) to the service. + * + * Note: an "upload" must be registered in order to link multiple shards + * together as a complete upload. + * + * The writeable side of this transform stream accepts CAR files and the + * readable side yields `CARMetadata`. 
+ * + * @extends {TransformStream} + */ +export class ShardStoringStream extends TransformStream { + /** + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `store/add` delegated capability. + * @param {import('./types').RequestOptions} [options] + */ + constructor({ issuer, proofs }, options = {}) { + const queue = new Queue({ concurrency: CONCURRENT_UPLOADS }) + const abortController = new AbortController() + super({ + async transform(car, controller) { + void queue.add( + async () => { + try { + const opts = { ...options, signal: abortController.signal } + const cid = await add({ issuer, proofs }, car, opts) + const { version, roots, size } = car + controller.enqueue({ version, roots, cid, size }) + } catch (err) { + controller.error(err) + abortController.abort(err) + } + }, + { signal: abortController.signal } + ) + + // retain backpressure by not returning until no items queued to be run + await queue.onSizeLessThan(1) + }, + async flush() { + // wait for queue empty AND pending items complete + await queue.onIdle() + }, + }) + } +} diff --git a/packages/upload-client/src/store.js b/packages/upload-client/src/store.js new file mode 100644 index 000000000..d6b2f2253 --- /dev/null +++ b/packages/upload-client/src/store.js @@ -0,0 +1,173 @@ +import { CAR } from '@ucanto/transport' +import * as StoreCapabilities from '@web3-storage/access/capabilities/store' +import retry, { AbortError } from 'p-retry' +import { serviceDID, connection } from './service.js' +import { findCapability } from './utils.js' +import { REQUEST_RETRIES } from './constants.js' + +/** + * Store a DAG encoded as a CAR file. 
The issuer needs the `store/add` + * delegated capability. + * + * Required delegated capability proofs: `store/add` + * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `store/add` delegated capability. + * @param {Blob} car CAR file data. + * @param {import('./types').RequestOptions} [options] + * @returns {Promise} + */ +export async function add({ issuer, proofs }, car, options = {}) { + const capability = findCapability(proofs, StoreCapabilities.add.can) + // TODO: validate blob contains CAR data + const bytes = new Uint8Array(await car.arrayBuffer()) + const link = await CAR.codec.link(bytes) + /* c8 ignore next */ + const conn = options.connection ?? connection + const result = await retry( + async () => { + return await StoreCapabilities.add + .invoke({ + issuer, + audience: serviceDID, + // @ts-expect-error expects did:${string} but cap with is ${string}:${string} + with: capability.with, + nb: { link, size: car.size }, + proofs, + }) + .execute(conn) + }, + { + onFailedAttempt: console.warn, + retries: options.retries ?? REQUEST_RETRIES, + } + ) + + if (result.error) { + throw new Error(`failed ${StoreCapabilities.add.can} invocation`, { + cause: result, + }) + } + + // Return early if it was already uploaded. 
+ if (result.status === 'done') { + return link + } + + const res = await retry( + async () => { + try { + const res = await fetch(result.url, { + method: 'PUT', + mode: 'cors', + body: car, + headers: result.headers, + signal: options.signal, + }) + if (res.status >= 400 && res.status < 500) { + throw new AbortError(`upload failed: ${res.status}`) + } + return res + } catch (err) { + if (options.signal?.aborted === true) { + throw new AbortError('upload aborted') + } + throw err + } + }, + { + onFailedAttempt: console.warn, + retries: options.retries ?? REQUEST_RETRIES, + } + ) + + if (!res.ok) { + throw new Error(`upload failed: ${res.status}`) + } + + return link +} + +/** + * List CAR files stored by the issuer. + * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `store/list` delegated capability. + * @param {import('./types').RequestOptions} [options] + */ +export async function list({ issuer, proofs }, options = {}) { + const capability = findCapability(proofs, StoreCapabilities.list.can) + /* c8 ignore next */ + const conn = options.connection ?? connection + + const result = await StoreCapabilities.list + .invoke({ + issuer, + audience: serviceDID, + // @ts-expect-error expects did:${string} but cap with is ${string}:${string} + with: capability.with, + }) + .execute(conn) + + if (result.error) { + throw new Error(`failed ${StoreCapabilities.list.can} invocation`, { + cause: result, + }) + } + + return result +} + +/** + * Remove a stored CAR file by CAR CID. + * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. 
An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `store/remove` delegated capability. + * @param {import('./types').CARLink} link CID of CAR file to remove. + * @param {import('./types').RequestOptions} [options] + */ +export async function remove({ issuer, proofs }, link, options = {}) { + const capability = findCapability(proofs, StoreCapabilities.remove.can) + /* c8 ignore next */ + const conn = options.connection ?? connection + + const result = await StoreCapabilities.remove + .invoke({ + issuer, + audience: serviceDID, + // @ts-expect-error expects did:${string} but cap with is ${string}:${string} + with: capability.with, + nb: { link }, + }) + .execute(conn) + + if (result?.error) { + throw new Error(`failed ${StoreCapabilities.remove.can} invocation`, { + cause: result, + }) + } +} diff --git a/packages/upload-client/src/types.ts b/packages/upload-client/src/types.ts new file mode 100644 index 000000000..f63a38cde --- /dev/null +++ b/packages/upload-client/src/types.ts @@ -0,0 +1,152 @@ +import { Link, UnknownLink, Version } from 'multiformats/link' +import { Block } from '@ipld/unixfs' +import { CAR } from '@ucanto/transport' +import { ServiceMethod, ConnectionView, Signer, Proof } from '@ucanto/interface' +import { + StoreAdd, + StoreList, + StoreRemove, + UploadAdd, + UploadList, + UploadRemove, +} from '@web3-storage/access/capabilities/types' + +export type { + StoreAdd, + StoreList, + StoreRemove, + UploadAdd, + UploadList, + UploadRemove, +} + +export interface Service { + store: { + add: ServiceMethod + list: ServiceMethod, never> + remove: ServiceMethod + } + upload: { + add: ServiceMethod + list: ServiceMethod, never> + remove: ServiceMethod + } +} + +export interface StoreAddResponse 
{ + status: string + headers: Record + url: string +} + +export interface ListResponse { + count: number + page: number + pageSize: number + results?: R[] +} + +export interface StoreListResult { + payloadCID: CARLink + size: number + uploadedAt: number +} + +export interface UploadListResult { + carCID: CARLink + dataCID: Link + uploadedAt: number +} + +export interface InvocationConfig { + /** + * Signing authority that is issuing the UCAN invocations. + */ + issuer: Signer + /** + * Proof(s) the issuer has the capability to perform the action. + */ + proofs: Proof[] +} + +export interface UnixFSEncodeResult { + /** + * Root CID for the DAG. + */ + cid: UnknownLink + /** + * Blocks for the generated DAG. + */ + blocks: Block[] +} + +/** + * Information present in the CAR file header. + */ +export interface CARHeaderInfo { + /** + * CAR version number. + */ + version: number + /** + * Root CIDs present in the CAR header. + */ + roots: Array> +} + +/** + * A DAG encoded as a CAR. + */ +export interface CARFile extends CARHeaderInfo, Blob {} + +/** + * An IPLD Link that has the CAR codec code. + */ +export type CARLink = Link + +/** + * Any IPLD link. + */ +export type AnyLink = Link + +/** + * Metadata pertaining to a CAR file. + */ +export interface CARMetadata extends CARHeaderInfo { + /** + * CID of the CAR file (not the data it contains). + */ + cid: CARLink + /** + * Size of the CAR file in bytes. + */ + size: number +} + +export interface Retryable { + retries?: number +} + +export interface Abortable { + signal?: AbortSignal +} + +export interface Connectable { + connection?: ConnectionView +} + +export type RequestOptions = Retryable & Abortable & Connectable + +export interface BlobLike { + /** + * Returns a ReadableStream which yields the Blob data. + */ + stream: () => ReadableStream +} + +export interface FileLike extends BlobLike { + /** + * Name of the file. May include path information. 
+ */ + name: string +} diff --git a/packages/upload-client/src/unixfs.js b/packages/upload-client/src/unixfs.js new file mode 100644 index 000000000..07e3c8f1a --- /dev/null +++ b/packages/upload-client/src/unixfs.js @@ -0,0 +1,144 @@ +import * as UnixFS from '@ipld/unixfs' +import * as raw from 'multiformats/codecs/raw' + +const queuingStrategy = UnixFS.withCapacity() + +// TODO: configure chunk size and max children https://github.com/ipld/js-unixfs/issues/36 +const settings = UnixFS.configure({ + fileChunkEncoder: raw, + smallFileEncoder: raw, +}) + +/** + * @param {import('./types').BlobLike} blob + * @returns {Promise} + */ +export async function encodeFile(blob) { + const readable = createFileEncoderStream(blob) + const blocks = await collect(readable) + // @ts-expect-error There is always a root block + return { cid: blocks.at(-1).cid, blocks } +} + +/** + * @param {import('./types').BlobLike} blob + * @returns {ReadableStream} + */ +export function createFileEncoderStream(blob) { + /** @type {TransformStream} */ + const { readable, writable } = new TransformStream({}, queuingStrategy) + const unixfsWriter = UnixFS.createWriter({ writable, settings }) + const fileBuilder = new UnixFsFileBuilder(blob) + void (async () => { + await fileBuilder.finalize(unixfsWriter) + await unixfsWriter.close() + })() + return readable +} + +class UnixFsFileBuilder { + #file + + /** @param {{ stream: () => ReadableStream }} file */ + constructor(file) { + this.#file = file + } + + /** @param {import('@ipld/unixfs').View} writer */ + async finalize(writer) { + const unixfsFileWriter = UnixFS.createFileWriter(writer) + await this.#file.stream().pipeTo( + new WritableStream({ + async write(chunk) { + await unixfsFileWriter.write(chunk) + }, + }) + ) + return await unixfsFileWriter.close() + } +} + +class UnixFSDirectoryBuilder { + /** @type {Map} */ + entries = new Map() + + /** @param {import('@ipld/unixfs').View} writer */ + async finalize(writer) { + const dirWriter = 
UnixFS.createDirectoryWriter(writer) + for (const [name, entry] of this.entries) { + const link = await entry.finalize(writer) + dirWriter.set(name, link) + } + return await dirWriter.close() + } +} + +/** + * @param {Iterable} files + * @returns {Promise} + */ +export async function encodeDirectory(files) { + const readable = createDirectoryEncoderStream(files) + const blocks = await collect(readable) + // @ts-expect-error There is always a root block + return { cid: blocks.at(-1).cid, blocks } +} + +/** + * @param {Iterable} files + * @returns {ReadableStream} + */ +export function createDirectoryEncoderStream(files) { + const rootDir = new UnixFSDirectoryBuilder() + + for (const file of files) { + const path = file.name.split('/') + if (path[0] === '' || path[0] === '.') { + path.shift() + } + let dir = rootDir + for (const [i, name] of path.entries()) { + if (i === path.length - 1) { + dir.entries.set(name, new UnixFsFileBuilder(file)) + break + } + let dirBuilder = dir.entries.get(name) + if (dirBuilder == null) { + dirBuilder = new UnixFSDirectoryBuilder() + dir.entries.set(name, dirBuilder) + } + if (!(dirBuilder instanceof UnixFSDirectoryBuilder)) { + throw new Error(`"${name}" cannot be a file and a directory`) + } + dir = dirBuilder + } + } + + /** @type {TransformStream} */ + const { readable, writable } = new TransformStream({}, queuingStrategy) + const unixfsWriter = UnixFS.createWriter({ writable, settings }) + void (async () => { + await rootDir.finalize(unixfsWriter) + await unixfsWriter.close() + })() + + return readable +} + +/** + * @template T + * @param {ReadableStream} collectable + * @returns {Promise} + */ +async function collect(collectable) { + /** @type {T[]} */ + const chunks = [] + await collectable.pipeTo( + new WritableStream({ + write(chunk) { + chunks.push(chunk) + }, + }) + ) + return chunks +} diff --git a/packages/upload-client/src/upload.js b/packages/upload-client/src/upload.js new file mode 100644 index 000000000..b521f2ec9 
--- /dev/null +++ b/packages/upload-client/src/upload.js @@ -0,0 +1,130 @@ +import * as UploadCapabilities from '@web3-storage/access/capabilities/upload' +import retry from 'p-retry' +import { serviceDID, connection } from './service.js' +import { findCapability } from './utils.js' +import { REQUEST_RETRIES } from './constants.js' + +/** + * Register an "upload" with the service. The issuer needs the `upload/add` + * delegated capability. + * + * Required delegated capability proofs: `upload/add` + * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `upload/add` delegated capability. + * @param {import('multiformats/link').UnknownLink} root Root data CID for the DAG that was stored. + * @param {import('./types').CARLink[]} shards CIDs of CAR files that contain the DAG. + * @param {import('./types').RequestOptions} [options] + */ +export async function add({ issuer, proofs }, root, shards, options = {}) { + const capability = findCapability(proofs, UploadCapabilities.add.can) + /* c8 ignore next */ + const conn = options.connection ?? connection + const result = await retry( + async () => { + return await UploadCapabilities.add + .invoke({ + issuer, + audience: serviceDID, + // @ts-expect-error expects did:${string} but cap with is ${string}:${string} + with: capability.with, + nb: { root, shards }, + }) + .execute(conn) + }, + { + onFailedAttempt: console.warn, + retries: options.retries ?? REQUEST_RETRIES, + } + ) + + if (result?.error) { + throw new Error(`failed ${UploadCapabilities.add.can} invocation`, { + cause: result, + }) + } +} + +/** + * List uploads created by the issuer. 
+ * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `upload/list` delegated capability. + * @param {import('./types').RequestOptions} [options] + */ +export async function list({ issuer, proofs }, options = {}) { + const capability = findCapability(proofs, UploadCapabilities.list.can) + /* c8 ignore next */ + const conn = options.connection ?? connection + + const result = await UploadCapabilities.list + .invoke({ + issuer, + audience: serviceDID, + // @ts-expect-error expects did:${string} but cap with is ${string}:${string} + with: capability.with, + }) + .execute(conn) + + if (result.error) { + throw new Error(`failed ${UploadCapabilities.list.can} invocation`, { + cause: result, + }) + } + + return result +} + +/** + * Remove an upload by root data CID. + * + * @param {import('./types').InvocationConfig} invocationConfig Configuration + * for the UCAN invocation. An object with `issuer` and `proofs`. + * + * The `issuer` is the signing authority that is issuing the UCAN + * invocation(s). It is typically the user _agent_. + * + * The `proofs` are a set of capability delegations that prove the issuer + * has the capability to perform the action. + * + * The issuer needs the `upload/remove` delegated capability. + * @param {import('multiformats').UnknownLink} root Root data CID to remove. + * @param {import('./types').RequestOptions} [options] + */ +export async function remove({ issuer, proofs }, root, options = {}) { + const capability = findCapability(proofs, UploadCapabilities.remove.can) + /* c8 ignore next */ + const conn = options.connection ?? 
connection + + const result = await UploadCapabilities.remove + .invoke({ + issuer, + audience: serviceDID, + // @ts-expect-error expects did:${string} but cap with is ${string}:${string} + with: capability.with, + nb: { root }, + }) + .execute(conn) + + if (result?.error) { + throw new Error(`failed ${UploadCapabilities.remove.can} invocation`, { + cause: result, + }) + } +} diff --git a/packages/upload-client/src/utils.js b/packages/upload-client/src/utils.js new file mode 100644 index 000000000..872a3cb9e --- /dev/null +++ b/packages/upload-client/src/utils.js @@ -0,0 +1,38 @@ +import { isDelegation } from '@ucanto/core' + +/** + * @param {import('@ucanto/interface').Proof[]} proofs + * @param {import('@ucanto/interface').Ability} ability + * @param {import('@ucanto/interface').DID} [audience] + */ +export function findCapability(proofs, ability, audience) { + let capability + for (const proof of proofs) { + if (!isDelegation(proof)) continue + if (audience != null && proof.audience.did() !== audience) continue + capability = proof.capabilities.find((c) => + capabilityMatches(c.can, ability) + ) + if (capability) break + } + if (!capability) { + throw new Error( + `Missing proof of delegated capability "${ability}"${ + audience ? 
` for audience "${audience}"` : '' + }` + ) + } + return capability +} + +/** + * @param {string} can + * @param {import('@ucanto/interface').Ability} ability + */ +function capabilityMatches(can, ability) { + if (can === ability) return true + if (can === '*/*') return true + if (can === '*') return true + if (can.endsWith('*') && ability.startsWith(can.slice(0, -1))) return true + return false +} diff --git a/packages/upload-client/test/car.test.js b/packages/upload-client/test/car.test.js new file mode 100644 index 000000000..369b6a542 --- /dev/null +++ b/packages/upload-client/test/car.test.js @@ -0,0 +1,16 @@ +import assert from 'assert' +import { CID } from 'multiformats' +import { encode } from '../src/car.js' + +describe('CAR.encode', () => { + it('propagates error when source throws', async () => { + // eslint-disable-next-line require-yield + const blocks = (async function* () { + throw new Error('boom') + })() + const root = CID.parse( + 'bafkreigh2akiscaildcqabsyg3dfr6chu3fgpregiymsck7e7aqa4s52zy' + ) + await assert.rejects(encode(blocks, root), { message: 'boom' }) + }) +}) diff --git a/packages/upload-client/test/fixtures.js b/packages/upload-client/test/fixtures.js new file mode 100644 index 000000000..50a464a0a --- /dev/null +++ b/packages/upload-client/test/fixtures.js @@ -0,0 +1,6 @@ +import * as ed25519 from '@ucanto/principal/ed25519' + +/** did:key:z6MkrZ1r5XBFZjBU34qyD8fueMbMRkKw17BZaq2ivKFjnz2z */ +export const serviceSigner = ed25519.parse( + 'MgCYKXoHVy7Vk4/QjcEGi+MCqjntUiasxXJ8uJKY0qh11e+0Bs8WsdqGK7xothgrDzzWD0ME7ynPjz2okXDh8537lId8=' +) diff --git a/packages/upload-client/test/helpers/bucket-server.js b/packages/upload-client/test/helpers/bucket-server.js new file mode 100644 index 000000000..9db6842fd --- /dev/null +++ b/packages/upload-client/test/helpers/bucket-server.js @@ -0,0 +1,15 @@ +import { createServer } from 'http' + +const port = process.env.PORT ?? 9000 +const status = process.env.STATUS ? 
parseInt(process.env.STATUS) : 200 + +const server = createServer((req, res) => { + res.setHeader('Access-Control-Allow-Origin', '*') + res.setHeader('Access-Control-Allow-Methods', '*') + res.setHeader('Access-Control-Allow-Headers', '*') + if (req.method === 'OPTIONS') return res.end() + res.statusCode = status + res.end() +}) + +server.listen(port, () => console.log(`Listening on :${port}`)) diff --git a/packages/upload-client/test/helpers/mocks.js b/packages/upload-client/test/helpers/mocks.js new file mode 100644 index 000000000..4d1386aae --- /dev/null +++ b/packages/upload-client/test/helpers/mocks.js @@ -0,0 +1,42 @@ +import * as Server from '@ucanto/server' + +const notImplemented = () => { + throw new Server.Failure('not implemented') +} + +/** + * @param {Partial<{ + * store: Partial + * upload: Partial + * }>} impl + */ +export function mockService(impl) { + return { + store: { + add: withCallCount(impl.store?.add ?? notImplemented), + list: withCallCount(impl.store?.list ?? notImplemented), + remove: withCallCount(impl.store?.remove ?? notImplemented), + }, + upload: { + add: withCallCount(impl.upload?.add ?? notImplemented), + list: withCallCount(impl.upload?.list ?? notImplemented), + remove: withCallCount(impl.upload?.remove ?? notImplemented), + }, + } +} + +/** + * @template {Function} T + * @param {T} fn + */ +function withCallCount(fn) { + /** @param {T extends (...args: infer A) => any ? 
A : never} args */ + const countedFn = (...args) => { + countedFn.called = true + countedFn.callCount++ + return fn(...args) + } + countedFn.called = false + countedFn.callCount = 0 + return countedFn +} diff --git a/packages/upload-client/test/helpers/random.js b/packages/upload-client/test/helpers/random.js new file mode 100644 index 000000000..1b0360864 --- /dev/null +++ b/packages/upload-client/test/helpers/random.js @@ -0,0 +1,49 @@ +import { CarWriter } from '@ipld/car' +import { CID } from 'multiformats/cid' +import * as raw from 'multiformats/codecs/raw' +import { sha256 } from 'multiformats/hashes/sha2' +import * as CAR from '@ucanto/transport/car' + +/** @param {number} size */ +export async function randomBytes(size) { + const bytes = new Uint8Array(size) + while (size) { + const chunk = new Uint8Array(Math.min(size, 65_536)) + if (!globalThis.crypto) { + try { + const { webcrypto } = await import('node:crypto') + webcrypto.getRandomValues(chunk) + } catch (err) { + throw new Error( + 'unknown environment - no global crypto and not Node.js', + { cause: err } + ) + } + } else { + crypto.getRandomValues(chunk) + } + size -= bytes.length + bytes.set(chunk, size) + } + return bytes +} + +/** @param {number} size */ +export async function randomCAR(size) { + const bytes = await randomBytes(size) + const hash = await sha256.digest(bytes) + const root = CID.create(1, raw.code, hash) + + const { writer, out } = CarWriter.create(root) + writer.put({ cid: root, bytes }) + writer.close() + + const chunks = [] + for await (const chunk of out) { + chunks.push(chunk) + } + const blob = new Blob(chunks) + const cid = await CAR.codec.link(new Uint8Array(await blob.arrayBuffer())) + + return Object.assign(blob, { cid, roots: [root] }) +} diff --git a/packages/upload-client/test/helpers/shims.js b/packages/upload-client/test/helpers/shims.js new file mode 100644 index 000000000..26b0c5c1f --- /dev/null +++ b/packages/upload-client/test/helpers/shims.js @@ -0,0 +1,10 @@ 
+export class File extends Blob { + /** + * @param {BlobPart[]} blobParts + * @param {string} name + */ + constructor(blobParts, name) { + super(blobParts) + this.name = name + } +} diff --git a/packages/upload-client/test/index.test.js b/packages/upload-client/test/index.test.js index bfcd8cc1e..ed91b9bb4 100644 --- a/packages/upload-client/test/index.test.js +++ b/packages/upload-client/test/index.test.js @@ -1,8 +1,182 @@ import assert from 'assert' -import Client from '../src/index.js' +import * as Client from '@ucanto/client' +import * as Server from '@ucanto/server' +import * as CAR from '@ucanto/transport/car' +import * as CBOR from '@ucanto/transport/cbor' +import * as Signer from '@ucanto/principal/ed25519' +import * as StoreCapabilities from '@web3-storage/access/capabilities/store' +import * as UploadCapabilities from '@web3-storage/access/capabilities/upload' +import { uploadFile, uploadDirectory } from '../src/index.js' +import { serviceSigner } from './fixtures.js' +import { randomBytes } from './helpers/random.js' +import { File } from './helpers/shims.js' +import { mockService } from './helpers/mocks.js' -describe('index', function () { - it('should export a client object', () => { - assert(Client) +describe('uploadFile', () => { + it('uploads a file to the service', async () => { + const res = { + status: 'upload', + headers: { 'x-test': 'true' }, + url: 'http://localhost:9200', + } + + const account = await Signer.generate() + const issuer = await Signer.generate() // The "user" that will ask the service to accept the upload + const file = new Blob([await randomBytes(128)]) + /** @type {import('../src/types').CARLink|undefined} */ + let carCID + + const proofs = await Promise.all([ + StoreCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + UploadCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + 
]) + + const service = mockService({ + store: { + add(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, StoreCapabilities.add.can) + assert.equal(invCap.with, account.did()) + return res + }, + }, + upload: { + add: (invocation) => { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, UploadCapabilities.add.can) + assert.equal(invCap.with, account.did()) + assert.equal(invCap.nb.shards?.length, 1) + assert.equal(String(invCap.nb.shards?.[0]), carCID?.toString()) + return null + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + const dataCID = await uploadFile({ issuer, proofs }, file, { + connection, + onStoredShard: (meta) => { + carCID = meta.cid + }, + }) + + assert(service.store.add.called) + assert.equal(service.store.add.callCount, 1) + assert(service.upload.add.called) + assert.equal(service.upload.add.callCount, 1) + + assert(carCID) + assert(dataCID) + }) +}) + +describe('uploadDirectory', () => { + it('uploads a directory to the service', async () => { + const res = { + status: 'upload', + headers: { 'x-test': 'true' }, + url: 'http://localhost:9200', + } + + const account = await Signer.generate() + const issuer = await Signer.generate() + const files = [ + new File([await randomBytes(128)], '1.txt'), + new File([await randomBytes(32)], '2.txt'), + ] + /** @type {import('../src/types').CARLink?} */ + let carCID = null + + const proofs = await Promise.all([ + StoreCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + 
UploadCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ]) + + const service = mockService({ + store: { + add(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, 'store/add') + assert.equal(invCap.with, account.did()) + return res + }, + }, + upload: { + add: (invocation) => { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, 'upload/add') + assert.equal(invCap.with, account.did()) + assert.equal(invCap.nb.shards?.length, 1) + assert.equal(String(invCap.nb.shards?.[0]), carCID?.toString()) + return null + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + const dataCID = await uploadDirectory({ issuer, proofs }, files, { + connection, + onStoredShard: (meta) => { + carCID = meta.cid + }, + }) + + assert(service.store.add.called) + assert.equal(service.store.add.callCount, 1) + assert(service.upload.add.called) + assert.equal(service.upload.add.callCount, 1) + + assert(carCID) + assert(dataCID) }) }) diff --git a/packages/upload-client/test/sharding.test.js b/packages/upload-client/test/sharding.test.js new file mode 100644 index 000000000..106412ae2 --- /dev/null +++ b/packages/upload-client/test/sharding.test.js @@ -0,0 +1,171 @@ +import assert from 'assert' +import * as Client from '@ucanto/client' +import * as Server from '@ucanto/server' +import * as CAR from '@ucanto/transport/car' +import * as CBOR from '@ucanto/transport/cbor' +import * as Signer from '@ucanto/principal/ed25519' +import { add as storeAdd } from 
'@web3-storage/access/capabilities/store' +import { createFileEncoderStream } from '../src/unixfs.js' +import { ShardingStream, ShardStoringStream } from '../src/sharding.js' +import { serviceSigner } from './fixtures.js' +import { randomBytes, randomCAR } from './helpers/random.js' +import { mockService } from './helpers/mocks.js' + +describe('ShardingStream', () => { + it('creates shards from blocks', async () => { + const file = new Blob([await randomBytes(1024 * 1024)]) + const shardSize = 512 * 1024 + + /** @type {import('../src/types').CARFile[]} */ + const shards = [] + + await createFileEncoderStream(file) + .pipeThrough(new ShardingStream({ shardSize })) + .pipeTo( + new WritableStream({ + write: (s) => { + shards.push(s) + }, + }) + ) + + assert(shards.length > 1) + + for (const car of shards) { + // add 100 bytes leeway to the chunk size for encoded CAR data + assert(car.size <= shardSize + 100) + } + }) +}) + +describe('ShardStoringStream', () => { + it('stores multiple DAGs with the service', async () => { + const res = { + status: 'upload', + headers: { 'x-test': 'true' }, + url: 'http://localhost:9200', + } + + const account = await Signer.generate() + const issuer = await Signer.generate() + const cars = await Promise.all([randomCAR(128), randomCAR(128)]) + let invokes = 0 + + const proofs = [ + await storeAdd.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + store: { + add(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, 'store/add') + assert.equal(invCap.with, account.did()) + assert.equal(String(invCap.nb.link), cars[invokes].cid.toString()) + invokes++ + return res + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = 
Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + let pulls = 0 + const carStream = new ReadableStream({ + pull(controller) { + if (pulls >= cars.length) return controller.close() + controller.enqueue(cars[pulls]) + pulls++ + }, + }) + + /** @type {import('../src/types').CARLink[]} */ + const carCIDs = [] + await carStream + .pipeThrough(new ShardStoringStream({ issuer, proofs }, { connection })) + .pipeTo( + new WritableStream({ + write: ({ cid }) => { + carCIDs.push(cid) + }, + }) + ) + + cars.forEach(({ cid }, i) => + assert.equal(cid.toString(), carCIDs[i].toString()) + ) + + assert(service.store.add.called) + assert.equal(service.store.add.callCount, 2) + }) + + it('aborts on service failure', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + const cars = await Promise.all([randomCAR(128), randomCAR(128)]) + + const proofs = [ + await storeAdd.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + store: { + add() { + throw new Server.Failure('boom') + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + let pulls = 0 + const carStream = new ReadableStream({ + pull(controller) { + if (pulls >= cars.length) return controller.close() + controller.enqueue(cars[pulls]) + pulls++ + }, + }) + + await assert.rejects( + carStream + .pipeThrough(new ShardStoringStream({ issuer, proofs }, { connection })) + .pipeTo(new WritableStream()), + { message: 'failed store/add invocation' } + ) + }) +}) diff --git a/packages/upload-client/test/store.test.js b/packages/upload-client/test/store.test.js new file mode 100644 index 000000000..d25556f20 --- /dev/null +++ b/packages/upload-client/test/store.test.js 
@@ -0,0 +1,444 @@ +import assert from 'assert' +import * as Client from '@ucanto/client' +import * as Server from '@ucanto/server' +import * as CAR from '@ucanto/transport/car' +import * as CBOR from '@ucanto/transport/cbor' +import * as Signer from '@ucanto/principal/ed25519' +import * as StoreCapabilities from '@web3-storage/access/capabilities/store' +import * as Store from '../src/store.js' +import { serviceSigner } from './fixtures.js' +import { randomCAR } from './helpers/random.js' +import { mockService } from './helpers/mocks.js' + +describe('Store.add', () => { + it('stores a DAG with the service', async () => { + const res = { + status: 'upload', + headers: { 'x-test': 'true' }, + url: 'http://localhost:9200', + } + + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await StoreCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + store: { + add(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, StoreCapabilities.add.can) + assert.equal(invCap.with, account.did()) + assert.equal(String(invCap.nb.link), car.cid.toString()) + return res + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + const carCID = await Store.add({ issuer, proofs }, car, { connection }) + + assert(service.store.add.called) + assert.equal(service.store.add.callCount, 1) + + assert(carCID) + assert.equal(carCID.toString(), car.cid.toString()) + }) + + it('throws for bucket URL client error 4xx', async () => { + const res = { + status: 'upload', + headers: { 'x-test': 
'true' }, + url: 'http://localhost:9400', // this bucket always returns a 400 + } + + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await StoreCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ store: { add: () => res } }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + assert.rejects(Store.add({ issuer, proofs }, car, { connection }), { + message: 'upload failed: 400', + }) + }) + + it('throws for bucket URL server error 5xx', async () => { + const res = { + status: 'upload', + headers: { 'x-test': 'true' }, + url: 'http://localhost:9500', // this bucket always returns a 500 + } + + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await StoreCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ store: { add: () => res } }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + assert.rejects(Store.add({ issuer, proofs }, car, { connection }), { + message: 'upload failed: 500', + }) + }) + + it('skips sending CAR if status = done', async () => { + const res = { + status: 'done', + headers: { 'x-test': 'true' }, + url: 'http://localhost:9001', // will fail the test if called + } + + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await 
StoreCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ store: { add: () => res } }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + const carCID = await Store.add({ issuer, proofs }, car, { connection }) + + assert(service.store.add.called) + assert.equal(service.store.add.callCount, 1) + + assert(carCID) + assert.equal(carCID.toString(), car.cid.toString()) + }) + + it('aborts', async () => { + const res = { + status: 'upload', + headers: { 'x-test': 'true' }, + url: 'http://localhost:9001', // will fail the test if called + } + + const service = mockService({ store: { add: () => res } }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await StoreCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const controller = new AbortController() + controller.abort() // already aborted + + await assert.rejects( + Store.add({ issuer, proofs }, car, { + connection, + signal: controller.signal, + }), + { name: 'Error', message: 'upload aborted' } + ) + }) +}) + +describe('Store.list', () => { + it('lists stored CAR files', async () => { + const car = await randomCAR(128) + const res = { + page: 1, + pageSize: 1000, + count: 1, + results: [ + { + payloadCID: car.cid, + size: 123, + uploadedAt: Date.now(), + }, + ], + } + + const account = await Signer.generate() + const issuer = await 
Signer.generate() + + const proofs = [ + await StoreCapabilities.list.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + store: { + list(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, StoreCapabilities.list.can) + assert.equal(invCap.with, account.did()) + return res + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + const list = await Store.list({ issuer, proofs }, { connection }) + + assert(service.store.list.called) + assert.equal(service.store.list.callCount, 1) + + assert.equal(list.count, res.count) + assert.equal(list.page, res.page) + assert.equal(list.pageSize, res.pageSize) + assert(list.results) + assert.equal(list.results.length, res.results.length) + list.results.forEach((r, i) => { + assert.equal( + r.payloadCID.toString(), + res.results[i].payloadCID.toString() + ) + assert.equal(r.size, res.results[i].size) + assert.equal(r.uploadedAt, res.results[i].uploadedAt) + }) + }) + + it('throws on service error', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + + const proofs = [ + await StoreCapabilities.list.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + store: { + list: () => { + throw new Server.Failure('boom') + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + await assert.rejects(Store.list({ 
issuer, proofs }, { connection }), { + message: 'failed store/list invocation', + }) + }) +}) + +describe('Store.remove', () => { + it('removes a stored CAR file', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await StoreCapabilities.remove.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + store: { + remove(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, StoreCapabilities.remove.can) + assert.equal(invCap.with, account.did()) + assert.equal(String(invCap.nb.link), car.cid.toString()) + return null + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + await Store.remove({ issuer, proofs }, car.cid, { connection }) + + assert(service.store.remove.called) + assert.equal(service.store.remove.callCount, 1) + }) + + it('throws on service error', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await StoreCapabilities.remove.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + store: { + remove: () => { + throw new Server.Failure('boom') + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + await assert.rejects( + Store.remove({ issuer, proofs }, car.cid, { 
connection }), + { message: 'failed store/remove invocation' } + ) + }) +}) diff --git a/packages/upload-client/test/unixfs.test.js b/packages/upload-client/test/unixfs.test.js new file mode 100644 index 000000000..7f197c9eb --- /dev/null +++ b/packages/upload-client/test/unixfs.test.js @@ -0,0 +1,84 @@ +import assert from 'assert' +import { exporter } from 'ipfs-unixfs-exporter' +import { MemoryBlockstore } from 'blockstore-core/memory' +import * as raw from 'multiformats/codecs/raw' +import path from 'path' +import { encodeFile, encodeDirectory } from '../src/unixfs.js' +import { File } from './helpers/shims.js' + +/** @param {import('ipfs-unixfs-exporter').UnixFSDirectory} dir */ +async function collectDir(dir) { + /** @type {import('ipfs-unixfs-exporter').UnixFSEntry[]} */ + const entries = [] + for await (const entry of dir.content()) { + if (entry.type === 'directory') { + entries.push(...(await collectDir(entry))) + } else { + entries.push(entry) + } + } + return entries +} + +/** @param {Iterable} blocks */ +async function blocksToBlockstore(blocks) { + const blockstore = new MemoryBlockstore() + for (const block of blocks) { + // @ts-expect-error https://github.com/ipld/js-unixfs/issues/30 + await blockstore.put(block.cid, block.bytes) + } + return blockstore +} + +describe('UnixFS', () => { + it('encodes a file', async () => { + const file = new Blob(['test']) + const { cid, blocks } = await encodeFile(file) + const blockstore = await blocksToBlockstore(blocks) + const entry = await exporter(cid.toString(), blockstore) + const chunks = [] + for await (const chunk of entry.content()) chunks.push(chunk) + const out = new Blob(chunks) + assert.equal(await out.text(), await file.text()) + }) + + it('encodes a directory', async () => { + const files = [ + new File(['top level'], 'aaaaa.txt'), + new File(['top level dot prefix'], './bbb.txt'), + new File(['top level slash prefix'], '/c.txt'), + new File(['in a dir'], 'dir/two.txt'), + new File(['another in a 
dir'], 'dir/three.txt'), + new File(['in deeper in dir'], 'dir/deeper/four.png'), + new File(['back in the parent'], 'dir/five.pdf'), + new File(['another in the child'], 'dir/deeper/six.mp4'), + ] + + const { cid, blocks } = await encodeDirectory(files) + const blockstore = await blocksToBlockstore(blocks) + const dirEntry = await exporter(cid.toString(), blockstore) + assert.equal(dirEntry.type, 'directory') + + const expectedPaths = files.map((f) => path.join(cid.toString(), f.name)) + // @ts-expect-error + const entries = await collectDir(dirEntry) + const actualPaths = entries.map((e) => e.path) + + expectedPaths.forEach((p) => assert(actualPaths.includes(p))) + }) + + it('throws then treating a file as a directory', () => + assert.rejects( + encodeDirectory([ + new File(['a file, not a directory'], 'file.txt'), + new File(['a file in a file!!!'], 'file.txt/another.txt'), + ]), + { message: '"file.txt" cannot be a file and a directory' } + )) + + it('configured to use raw leaves', async () => { + const file = new Blob(['test']) + const { cid } = await encodeFile(file) + assert.equal(cid.code, raw.code) + }) +}) diff --git a/packages/upload-client/test/upload.test.js b/packages/upload-client/test/upload.test.js new file mode 100644 index 000000000..3727bac1e --- /dev/null +++ b/packages/upload-client/test/upload.test.js @@ -0,0 +1,305 @@ +import assert from 'assert' +import * as Client from '@ucanto/client' +import * as Server from '@ucanto/server' +import * as CAR from '@ucanto/transport/car' +import * as CBOR from '@ucanto/transport/cbor' +import * as Signer from '@ucanto/principal/ed25519' +import * as UploadCapabilities from '@web3-storage/access/capabilities/upload' +import * as Upload from '../src/upload.js' +import { serviceSigner } from './fixtures.js' +import { randomCAR } from './helpers/random.js' +import { mockService } from './helpers/mocks.js' + +describe('Upload.add', () => { + it('registers an upload with the service', async () => { + const 
account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await UploadCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + upload: { + add: (invocation) => { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, UploadCapabilities.add.can) + assert.equal(invCap.with, account.did()) + assert.equal(String(invCap.nb.root), car.roots[0].toString()) + assert.equal(invCap.nb.shards?.length, 1) + assert.equal(String(invCap.nb.shards?.[0]), car.cid.toString()) + return null + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + const root = car.roots[0] + await Upload.add({ issuer, proofs }, root, [car.cid], { connection }) + + assert(service.upload.add.called) + assert.equal(service.upload.add.callCount, 1) + }) + + it('throws on service error', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await UploadCapabilities.add.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + upload: { + add: () => { + throw new Server.Failure('boom') + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + await assert.rejects( + Upload.add({ issuer, proofs }, car.roots[0], [car.cid], { connection }), + { message: 'failed 
upload/add invocation' } + ) + }) +}) + +describe('Upload.list', () => { + it('lists uploads', async () => { + const car = await randomCAR(128) + const res = { + page: 1, + pageSize: 1000, + count: 1, + results: [ + { + carCID: car.cid, + dataCID: car.roots[0], + uploadedAt: Date.now(), + }, + ], + } + + const account = await Signer.generate() + const issuer = await Signer.generate() + + const proofs = [ + await UploadCapabilities.list.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + upload: { + list(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, UploadCapabilities.list.can) + assert.equal(invCap.with, account.did()) + return res + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + const list = await Upload.list({ issuer, proofs }, { connection }) + + assert(service.upload.list.called) + assert.equal(service.upload.list.callCount, 1) + + assert.equal(list.count, res.count) + assert.equal(list.page, res.page) + assert.equal(list.pageSize, res.pageSize) + assert(list.results) + assert.equal(list.results.length, res.results.length) + list.results.forEach((r, i) => { + assert.equal(r.carCID.toString(), res.results[i].carCID.toString()) + assert.equal(r.dataCID.toString(), res.results[i].dataCID.toString()) + assert.equal(r.uploadedAt, res.results[i].uploadedAt) + }) + }) + + it('throws on service error', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + + const proofs = [ + await UploadCapabilities.list.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: 
Infinity, + }), + ] + + const service = mockService({ + upload: { + list: () => { + throw new Server.Failure('boom') + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + await assert.rejects(Upload.list({ issuer, proofs }, { connection }), { + message: 'failed upload/list invocation', + }) + }) +}) + +describe('Upload.remove', () => { + it('removes an upload', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await UploadCapabilities.remove.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const service = mockService({ + upload: { + remove(invocation) { + assert.equal(invocation.issuer.did(), issuer.did()) + assert.equal(invocation.capabilities.length, 1) + const invCap = invocation.capabilities[0] + assert.equal(invCap.can, UploadCapabilities.remove.can) + assert.equal(invCap.with, account.did()) + assert.equal(String(invCap.nb.root), car.roots[0].toString()) + return null + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + await Upload.remove({ issuer, proofs }, car.roots[0], { connection }) + + assert(service.upload.remove.called) + assert.equal(service.upload.remove.callCount, 1) + }) + + it('throws on service error', async () => { + const account = await Signer.generate() + const issuer = await Signer.generate() + const car = await randomCAR(128) + + const proofs = [ + await UploadCapabilities.remove.delegate({ + issuer: account, + audience: serviceSigner, + with: account.did(), + expiration: Infinity, + }), + ] + + const 
service = mockService({ + upload: { + remove: () => { + throw new Server.Failure('boom') + }, + }, + }) + + const server = Server.create({ + id: serviceSigner, + service, + decoder: CAR, + encoder: CBOR, + }) + const connection = Client.connect({ + id: serviceSigner, + encoder: CAR, + decoder: CBOR, + channel: server, + }) + + await assert.rejects( + Upload.remove({ issuer, proofs }, car.roots[0], { connection }), + { message: 'failed upload/remove invocation' } + ) + }) +}) diff --git a/packages/upload-client/test/utils.test.js b/packages/upload-client/test/utils.test.js new file mode 100644 index 000000000..9e81ca9c2 --- /dev/null +++ b/packages/upload-client/test/utils.test.js @@ -0,0 +1,127 @@ +import assert from 'assert' +import * as Signer from '@ucanto/principal/ed25519' +import { capability, URI } from '@ucanto/validator' +import { any } from '@web3-storage/access/capabilities/any' +import * as StoreCapabilities from '@web3-storage/access/capabilities/store' +import { equalWith } from '@web3-storage/access/capabilities/utils' +import { serviceSigner } from './fixtures.js' +import { findCapability } from '../src/utils.js' + +describe('findCapability', () => { + it('throws when capability is not found', () => { + assert.throws(() => findCapability([], 'store/add'), { + message: 'Missing proof of delegated capability "store/add"', + }) + }) + + it('throws for mismatched audience', async () => { + const issuer = await Signer.generate() + const proofs = [ + await StoreCapabilities.add.delegate({ + issuer, + audience: serviceSigner, + with: issuer.did(), + expiration: Infinity, + }), + ] + + // we match on `audience`. Passing in the issuer or any other DID here should fail. 
+ assert.throws(() => findCapability(proofs, 'store/add', issuer.did()), { + message: `Missing proof of delegated capability "store/add" for audience "${issuer.did()}"`, + }) + }) + + it('matches capability', async () => { + const issuer = await Signer.generate() + const proofs = [ + await StoreCapabilities.add.delegate({ + issuer, + audience: serviceSigner, + with: issuer.did(), + expiration: Infinity, + }), + ] + + const cap = findCapability(proofs, 'store/add') + assert.equal(cap.can, 'store/add') + }) + + it('matches any wildcard capability', async () => { + const issuer = await Signer.generate() + const proofs = [ + await any.delegate({ + issuer, + audience: serviceSigner, + with: issuer.did(), + expiration: Infinity, + }), + ] + + const cap = findCapability(proofs, 'store/add') + assert.equal(cap.can, '*') + }) + + it('matches top wildcard capability', async () => { + const issuer = await Signer.generate() + const proofs = [ + await capability({ + can: '*/*', + with: URI.match({ protocol: 'did:' }), + derives: equalWith, + }).delegate({ + issuer, + audience: serviceSigner, + with: issuer.did(), + expiration: Infinity, + }), + ] + + const cap = findCapability(proofs, 'store/add') + assert.equal(cap.can, '*/*') + }) + + it('matches wildcard capability', async () => { + const issuer = await Signer.generate() + const proofs = [ + await StoreCapabilities.store.delegate({ + issuer, + audience: serviceSigner, + with: issuer.did(), + expiration: Infinity, + }), + ] + + const cap = findCapability(proofs, 'store/add') + assert.equal(cap.can, 'store/*') + }) + + it('matches wildcard capability with audience', async () => { + const issuer = await Signer.generate() + const proofs = [ + await StoreCapabilities.store.delegate({ + issuer, + audience: serviceSigner, + with: issuer.did(), + expiration: Infinity, + }), + ] + + const cap = findCapability(proofs, 'store/add', serviceSigner.did()) + assert.equal(cap.can, 'store/*') + }) + + it('ignores non-delegation proofs', 
async () => { + const issuer = await Signer.generate() + const delegation = await StoreCapabilities.store.delegate({ + issuer, + audience: serviceSigner, + with: issuer.did(), + expiration: Infinity, + }) + const proofs = [delegation.cid] + + assert.throws(() => findCapability(proofs, 'store/add'), { + message: 'Missing proof of delegated capability "store/add"', + }) + }) +}) diff --git a/packages/upload-client/tsconfig.json b/packages/upload-client/tsconfig.json index 180297270..c3fbadc5a 100644 --- a/packages/upload-client/tsconfig.json +++ b/packages/upload-client/tsconfig.json @@ -6,5 +6,6 @@ "emitDeclarationOnly": true }, "include": ["src", "scripts", "test", "package.json"], - "exclude": ["**/node_modules/**"] + "exclude": ["**/node_modules/**"], + "references": [{ "path": "../access" }] } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ed37c36f4..08833bd9d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -261,19 +261,60 @@ importers: packages/upload-client: specifiers: + '@ipld/car': ^5.0.0 + '@ipld/dag-ucan': ^2.0.1 + '@ipld/unixfs': ^2.0.0 '@types/assert': ^1.5.6 '@types/mocha': ^10.0.0 + '@ucanto/client': ^3.0.1 + '@ucanto/core': ^3.0.1 + '@ucanto/interface': ^3.0.0 + '@ucanto/principal': ^3.0.0 + '@ucanto/server': ^3.0.1 + '@ucanto/transport': ^3.0.1 + '@ucanto/validator': ^3.0.1 + '@web3-storage/access': workspace:^ assert: ^2.0.0 + blockstore-core: ^2.0.2 + c8: ^7.12.0 hd-scripts: ^3.0.2 + hundreds: ^0.0.9 + ipfs-unixfs-exporter: ^9.0.1 mocha: ^10.1.0 + multiformats: ^10.0.2 + npm-run-all: ^4.1.5 + p-queue: ^7.3.0 + p-retry: ^5.1.1 + path: ^0.12.7 playwright-test: ^8.1.1 typescript: ^4.8.4 + dependencies: + '@ipld/car': 5.0.0 + '@ipld/dag-ucan': 2.0.1 + '@ipld/unixfs': 2.0.0 + '@ucanto/client': 3.0.2 + '@ucanto/core': 3.0.2 + '@ucanto/interface': 3.0.1 + '@ucanto/transport': 3.0.2 + '@web3-storage/access': link:../access + multiformats: 10.0.2 + p-queue: 7.3.0 + p-retry: 5.1.1 devDependencies: '@types/assert': 1.5.6 '@types/mocha': 10.0.0 + 
'@ucanto/principal': 3.0.1 + '@ucanto/server': 3.0.4 + '@ucanto/validator': 3.0.4 assert: 2.0.0 + blockstore-core: 2.0.2 + c8: 7.12.0 hd-scripts: 3.0.2 + hundreds: 0.0.9 + ipfs-unixfs-exporter: 9.0.1 mocha: 10.1.0 + npm-run-all: 4.1.5 + path: 0.12.7 playwright-test: 8.1.1 typescript: 4.8.4 @@ -345,6 +386,10 @@ packages: regenerator-runtime: 0.13.10 dev: true + /@bcoe/v8-coverage/0.2.3: + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + dev: true + /@cloudflare/kv-asset-handler/0.2.0: resolution: {integrity: sha512-MVbXLbTcAotOPUj0pAMhVtJ+3/kFkwJqc5qNOleOZTv6QkZZABDMS21dSrSlVswEHwrpWC03e4fWytjqKvuE2A==} dependencies: @@ -463,7 +508,6 @@ packages: cborg: 1.9.5 multiformats: 10.0.2 varint: 6.0.0 - dev: false /@ipld/dag-cbor/8.0.0: resolution: {integrity: sha512-VfedC21yAD/ZIahcrHTeMcc17kEVRlCmHQl0JY9/Rwbd102v0QcuXtBN8KGH8alNO82S89+H6MM/hxP85P4Veg==} @@ -471,7 +515,6 @@ packages: dependencies: cborg: 1.9.5 multiformats: 10.0.2 - dev: false /@ipld/dag-json/9.0.1: resolution: {integrity: sha512-dL5Xhrk0XXoq3lSsY2LNNraH2Nxx4nlgQwSarl2J3oir2jBDQEiBDW8bjgr30ni8/epdWDhXm5mdxat8dFWwGQ==} @@ -479,14 +522,36 @@ packages: dependencies: cborg: 1.9.5 multiformats: 10.0.2 + + /@ipld/dag-pb/2.1.18: + resolution: {integrity: sha512-ZBnf2fuX9y3KccADURG5vb9FaOeMjFkCrNysB0PtftME/4iCTjxfaLoNq/IAh5fTqUOMXvryN6Jyka4ZGuMLIg==} + dependencies: + multiformats: 9.9.0 dev: false + /@ipld/dag-pb/3.0.0: + resolution: {integrity: sha512-d9TYsiS/8ixtUnXRkpHU+4kkI1ZUN57n/HjMvK75uhezL1p9+heWryb/rv+Ztlbux9OW9Zus75lMjFoPbG36bw==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + multiformats: 10.0.2 + dev: true + /@ipld/dag-ucan/2.0.1: resolution: {integrity: sha512-0cqnXPmjmFhz9JVtgU/wCaNvbnFr/HYzl4LaVm7Q7c8FsF9u671rOvNgCbSpJL6f+YTM4Q4fihxLYWPHaUSEww==} dependencies: '@ipld/dag-cbor': 8.0.0 '@ipld/dag-json': 9.0.1 multiformats: 10.0.2 + + /@ipld/unixfs/2.0.0: + resolution: {integrity: 
sha512-Li6ObZWlnQPM8R1O6mjUWQWlxjf+4yjZDERZIvNILOXeTvF0G36WFIdr3c2s9M6Aiez8gCMzodNnJLRXzXnJ0Q==} + dependencies: + '@ipld/dag-pb': 2.1.18 + '@web-std/stream': 1.0.1 + actor: 2.3.1 + multiformats: 10.0.2 + protobufjs: 7.1.2 + rabin-rs: 2.1.0 dev: false /@istanbuljs/schema/0.1.3: @@ -852,6 +917,14 @@ packages: - utf-8-validate dev: true + /@multiformats/murmur3/2.0.0: + resolution: {integrity: sha512-rnmRpmHMMlgnDQEL5IJ8GiNECMrNv0PR5tlmBTDlb9cnx4oSgFQRdqw7a0uqo3ftt/lUbc9cM1pOE/NxZGX7NQ==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + multiformats: 10.0.2 + murmurhash3js-revisited: 3.0.0 + dev: true + /@next/env/12.3.1: resolution: {integrity: sha512-9P9THmRFVKGKt9DYqeC2aKIxm8rlvkK38V1P1sRE7qyoPBIs8l9oo79QoSdPtOWfzkbDAVUqvbQGgTMsb8BtJg==} dev: false @@ -981,7 +1054,6 @@ packages: /@noble/ed25519/1.7.1: resolution: {integrity: sha512-Rk4SkJFaXZiznFyC/t77Q0NKS4FL7TLJJsVG2V2oiEq3kJVeTdxysEe/yRWSpnWMe808XRDJ+VFh5pt/FN5plw==} - dev: false /@nodelib/fs.scandir/2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -1021,6 +1093,39 @@ packages: resolution: {integrity: sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==} dev: true + /@protobufjs/aspromise/1.1.2: + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + /@protobufjs/base64/1.1.2: + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + /@protobufjs/codegen/2.0.4: + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + /@protobufjs/eventemitter/1.1.0: + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + /@protobufjs/fetch/1.1.0: + resolution: {integrity: 
sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + /@protobufjs/float/1.0.2: + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + /@protobufjs/inquire/1.1.0: + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + /@protobufjs/path/1.1.2: + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + /@protobufjs/pool/1.1.0: + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + /@protobufjs/utf8/1.1.0: + resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + /@rushstack/eslint-patch/1.2.0: resolution: {integrity: sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==} dev: true @@ -1380,14 +1485,12 @@ packages: '@ipld/dag-ucan': 2.0.1 '@ucanto/interface': 3.0.1 multiformats: 10.0.2 - dev: false /@ucanto/interface/3.0.1: resolution: {integrity: sha512-1UlyLMjJwgzAmhlqu/V1gz0xrE9MUiI3gdzxOJbIXwrS7zAWsbtUZqIS/SPpr1+PYnNO8PHSGyGekb+N0dqtWQ==} dependencies: '@ipld/dag-ucan': 2.0.1 multiformats: 10.0.2 - dev: false /@ucanto/principal/3.0.1: resolution: {integrity: sha512-0U0EF2ddfR55LKKc7FkzTkH8OFvjZvEeRqMkyV5FyeSsFEF7lXhgEXZrNgggK2zlriXjYC9VVCpPhsGb4cI72g==} @@ -1397,7 +1500,6 @@ packages: '@ucanto/interface': 3.0.1 multiformats: 10.0.2 one-webcrypto: 1.0.3 - dev: false /@ucanto/server/3.0.4: resolution: {integrity: sha512-4a5ou5HZ8ymw39EeZ0024nu0JD916xTMpYsS4eXMxpnoWlOjgmoeWZ5e01dEiw5AlfnSa8e9+a0b1dzvSISQxQ==} @@ -1405,7 +1507,6 @@ packages: '@ucanto/core': 3.0.2 '@ucanto/interface': 3.0.1 '@ucanto/validator': 3.0.4 - dev: false /@ucanto/transport/3.0.2: 
resolution: {integrity: sha512-IyfI26VWPxCL2jnGiGP1i6mZblk8QORHzEVt5t+7Pic2k7pANQHoqbveQveRb9a8z6D/UdFogSPQxWYYtKaxWQ==} @@ -1425,7 +1526,6 @@ packages: '@ucanto/core': 3.0.2 '@ucanto/interface': 3.0.1 multiformats: 10.0.2 - dev: false /@web-std/blob/3.0.4: resolution: {integrity: sha512-+dibyiw+uHYK4dX5cJ7HA+gtDAaUUe6JsOryp2ZpAC7h4ICsh49E34JwHoEKPlPvP0llCrNzz45vvD+xX5QDBg==} @@ -1508,6 +1608,10 @@ packages: hasBin: true dev: true + /actor/2.3.1: + resolution: {integrity: sha512-ST/3wnvcP2tKDXnum7nLCLXm+/rsf8vPocXH2Fre6D8FQwNkGDd4JEitBlXj007VQJfiGYRQvXqwOBZVi+JtRg==} + dev: false + /agent-base/6.0.2: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} engines: {node: '>= 6.0.0'} @@ -1864,6 +1968,20 @@ packages: resolution: {integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==} dev: true + /blockstore-core/2.0.2: + resolution: {integrity: sha512-ALry3rBp2pTEi4F/usjCJGRluAKYFWI9Np7uE0pZHfDeScMJSj/fDkHEWvY80tPYu4kj03sLKRDGJlZH+V7VzQ==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + err-code: 3.0.1 + interface-blockstore: 3.0.1 + interface-store: 3.0.1 + it-all: 1.0.6 + it-drain: 1.0.5 + it-filter: 1.0.3 + it-take: 1.0.2 + multiformats: 10.0.2 + dev: true + /blueimp-md5/2.19.0: resolution: {integrity: sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==} dev: true @@ -1926,6 +2044,25 @@ packages: dependencies: streamsearch: 1.1.0 + /c8/7.12.0: + resolution: {integrity: sha512-CtgQrHOkyxr5koX1wEUmN/5cfDa2ckbHRA4Gy5LAL0zaCFtVWJS5++n+w4/sr2GWGerBxgTjpKeDclk/Qk6W/A==} + engines: {node: '>=10.12.0'} + hasBin: true + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@istanbuljs/schema': 0.1.3 + find-up: 5.0.0 + foreground-child: 2.0.0 + istanbul-lib-coverage: 3.2.0 + istanbul-lib-report: 3.0.0 + istanbul-reports: 3.1.5 + rimraf: 3.0.2 + test-exclude: 6.0.0 + v8-to-istanbul: 9.0.1 + yargs: 16.2.0 + 
yargs-parser: 20.2.9 + dev: true + /call-bind/1.0.2: resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} dependencies: @@ -1966,7 +2103,6 @@ packages: /cborg/1.9.5: resolution: {integrity: sha512-fLBv8wmqtlXqy1Yu+pHzevAIkW6k2K0ZtMujNzWphLsA34vzzg9BHn+5GmZqOJkSA9V7EMKsWrf6K976c1QMjQ==} hasBin: true - dev: false /chalk/2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} @@ -2263,6 +2399,17 @@ packages: resolution: {integrity: sha512-izfGgKyzzIyLaeb1EtZ3KbglkS6AKp9cv7LxmiyoOu+fXfol1tQDC0Cof0enVZGNtudTHW+3lfuW9ZkLQss4Wg==} dev: true + /cross-spawn/6.0.5: + resolution: {integrity: sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==} + engines: {node: '>=4.8'} + dependencies: + nice-try: 1.0.5 + path-key: 2.0.1 + semver: 5.7.1 + shebang-command: 1.2.0 + which: 1.3.1 + dev: true + /cross-spawn/7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} @@ -2534,6 +2681,10 @@ packages: engines: {node: '>=6'} dev: false + /err-code/3.0.1: + resolution: {integrity: sha512-GiaH0KJUewYok+eeY05IIgjtAe4Yltygk9Wqp1V5yVWLdhf0hYZchRjNIT9bb0mSwRcIusT3cx7PJUf3zEIfUA==} + dev: true + /error-ex/1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} dependencies: @@ -3797,7 +3948,6 @@ packages: /eventemitter3/4.0.7: resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} - dev: false /events/3.3.0: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} @@ -3846,6 +3996,10 @@ packages: resolution: {integrity: sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==} dev: 
true + /fast-fifo/1.1.0: + resolution: {integrity: sha512-Kl29QoNbNvn4nhDsLYjyIAaIqaJB6rBx5p3sL9VjaefJ+eMFBWVZiaoguaoZfzEKr5RhAti0UgM8703akGPJ6g==} + dev: true + /fast-glob/3.2.12: resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} engines: {node: '>=8.6.0'} @@ -3948,6 +4102,14 @@ packages: dependencies: is-callable: 1.2.7 + /foreground-child/2.0.0: + resolution: {integrity: sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==} + engines: {node: '>=8.0.0'} + dependencies: + cross-spawn: 7.0.3 + signal-exit: 3.0.7 + dev: true + /fs-constants/1.0.0: resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} dev: true @@ -4143,6 +4305,14 @@ packages: resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} dev: true + /hamt-sharding/3.0.2: + resolution: {integrity: sha512-f0DzBD2tSmLFdFsLAvOflIBqFPjerbA7BfmwO8mVho/5hXwgyyYhv+ijIzidQf/DpDX3bRjAQvhGoBFj+DBvPw==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + sparse-array: 1.3.2 + uint8arrays: 4.0.2 + dev: true + /has-bigints/1.0.2: resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} dev: true @@ -4229,6 +4399,10 @@ packages: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} dev: true + /html-escaper/2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + dev: true + /html-rewriter-wasm/0.4.1: resolution: {integrity: sha512-lNovG8CMCCmcVB1Q7xggMSf7tqPCijZXaH4gL6iE8BFghdQCbaY5Met9i1x2Ex8m/cZHDUtXK9H6/znKamRP8Q==} dev: true @@ -4252,6 +4426,13 @@ packages: engines: {node: '>=12.20.0'} dev: true + /hundreds/0.0.9: + resolution: {integrity: 
sha512-4pHIJQl4SiVeMeg00w6WypEjVrSgVUxQS8YlOWW3KehCdlz/PoEhKlJY2DYeR56lPoF5KA+YjHKgnwhCsKJ7IQ==} + hasBin: true + dependencies: + c8: 7.12.0 + dev: true + /iconv-lite/0.4.24: resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} engines: {node: '>=0.10.0'} @@ -4302,6 +4483,10 @@ packages: wrappy: 1.0.2 dev: true + /inherits/2.0.3: + resolution: {integrity: sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==} + dev: true + /inherits/2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} @@ -4330,6 +4515,19 @@ packages: wrap-ansi: 8.0.1 dev: false + /interface-blockstore/3.0.1: + resolution: {integrity: sha512-yZcLm+ewUbWhvAhvqd+Xbt+w5Sm5SeG0s1HTb0gkGESZVM7MEc1cC5uDRUe6i+X4hEzWO10HCqENbpTgHuWerQ==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + interface-store: 3.0.1 + multiformats: 10.0.2 + dev: true + + /interface-store/3.0.1: + resolution: {integrity: sha512-S5JcwBV+cJorsD0zGKHcBa8A2e578gw9vhZX0QhkV4Xyl4lAMAg5N2GJceUnjCfj/FOKzxTdABzJKPOF2Id8Ig==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dev: true + /internal-slot/1.0.3: resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} engines: {node: '>= 0.4'} @@ -4344,6 +4542,35 @@ packages: engines: {node: '>= 0.10'} dev: true + /ipfs-unixfs-exporter/9.0.1: + resolution: {integrity: sha512-n/nHhnW9ec4UHI0eQq9VTGgm0+k3FP0OmAFmbICCqwRrmTkgguXOgHb/Z51wWJ/TXvbI5CPz9xqAzG1/lGRyBA==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + '@ipld/dag-cbor': 8.0.0 + '@ipld/dag-pb': 3.0.0 + '@multiformats/murmur3': 2.0.0 + err-code: 3.0.1 + hamt-sharding: 3.0.2 + interface-blockstore: 3.0.1 + ipfs-unixfs: 8.0.0 + it-last: 2.0.0 + it-map: 2.0.0 + it-parallel: 3.0.0 + it-pipe: 2.0.4 + it-pushable: 3.1.0 + multiformats: 10.0.2 + p-queue: 7.3.0 + uint8arrays: 
4.0.2 + dev: true + + /ipfs-unixfs/8.0.0: + resolution: {integrity: sha512-PAHtfyjiFs2PZBbeft5QRyXpVOvZ2zsGqID+zVRla7fjC1zRTqJkrGY9h6dF03ldGv/mSmFlNZh479qPC6aZKg==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + err-code: 3.0.1 + protobufjs: 7.1.2 + dev: true + /irregular-plurals/3.3.0: resolution: {integrity: sha512-MVBLKUTangM3EfRPFROhmWQQKRDsrgI83J8GS3jXy+OwYqiR2/aoWndYQ5416jLE3uaGgLH7ncme3X9y09gZ3g==} engines: {node: '>=8'} @@ -4603,6 +4830,90 @@ packages: ws: 8.10.0 dev: false + /istanbul-lib-coverage/3.2.0: + resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} + engines: {node: '>=8'} + dev: true + + /istanbul-lib-report/3.0.0: + resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} + engines: {node: '>=8'} + dependencies: + istanbul-lib-coverage: 3.2.0 + make-dir: 3.1.0 + supports-color: 7.2.0 + dev: true + + /istanbul-reports/3.1.5: + resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} + engines: {node: '>=8'} + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.0 + dev: true + + /it-all/1.0.6: + resolution: {integrity: sha512-3cmCc6Heqe3uWi3CVM/k51fa/XbMFpQVzFoDsV0IZNHSQDyAXl3c4MjHkFX5kF3922OGj7Myv1nSEUgRtcuM1A==} + dev: true + + /it-drain/1.0.5: + resolution: {integrity: sha512-r/GjkiW1bZswC04TNmUnLxa6uovme7KKwPhc+cb1hHU65E3AByypHH6Pm91WHuvqfFsm+9ws0kPtDBV3/8vmIg==} + dev: true + + /it-filter/1.0.3: + resolution: {integrity: sha512-EI3HpzUrKjTH01miLHWmhNWy3Xpbx4OXMXltgrNprL5lDpF3giVpHIouFpr5l+evXw6aOfxhnt01BIB+4VQA+w==} + dev: true + + /it-last/2.0.0: + resolution: {integrity: sha512-u0GHZ01tWYtPvDkOaqZSLLWjFv3IJw9cPL9mbEV7wnE8DOsbVoXIuKpnz3U6pySl5RzPVjTzSHOc961ZYttBxg==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dev: true + + /it-map/2.0.0: + resolution: {integrity: 
sha512-mLgtk/NZaN7NZ06iLrMXCA6jjhtZO0vZT5Ocsp31H+nsGI18RSPVmUbFyA1sWx7q+g92J22Sixya7T2QSSAwfA==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dev: true + + /it-merge/1.0.4: + resolution: {integrity: sha512-DcL6GksTD2HQ7+5/q3JznXaLNfwjyG3/bObaF98da+oHfUiPmdo64oJlT9J8R8G5sJRU7thwaY5zxoAKCn7FJw==} + dependencies: + it-pushable: 1.4.2 + dev: true + + /it-parallel/3.0.0: + resolution: {integrity: sha512-/y70cY7VoZ7natLbWrPxoRaKWMD67RvtWx21cyLJr6kkuHrUWOrHNr8CPMBqzDRh73aig/uUT82hzTTmTTkDUg==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + p-defer: 4.0.0 + dev: true + + /it-pipe/2.0.4: + resolution: {integrity: sha512-lK0BV0egwfc64DFJva+0Jh1z8UxwmYBpAHDwq21s0OenRCaEDIntx/iOyWH/jg5efBU6Xa8igzmOqm2CPPNDgg==} + engines: {node: '>=16.0.0', npm: '>=7.0.0'} + dependencies: + it-merge: 1.0.4 + it-pushable: 3.1.0 + it-stream-types: 1.0.4 + dev: true + + /it-pushable/1.4.2: + resolution: {integrity: sha512-vVPu0CGRsTI8eCfhMknA7KIBqqGFolbRx+1mbQ6XuZ7YCz995Qj7L4XUviwClFunisDq96FdxzF5FnAbw15afg==} + dependencies: + fast-fifo: 1.1.0 + dev: true + + /it-pushable/3.1.0: + resolution: {integrity: sha512-sEAdT86u6aIWvLkH4hlOmgvHpRyUOUG22HD365H+Dh67zYpaPdILmT4Om7Wjdb+m/SjEB81z3nYCoIrgVYpOFA==} + dev: true + + /it-stream-types/1.0.4: + resolution: {integrity: sha512-0F3CqTIcIHwtnmIgqd03a7sw8BegAmE32N2w7anIGdALea4oAN4ltqPgDMZ7zn4XPLZifXEZlBXSzgg64L1Ebw==} + dev: true + + /it-take/1.0.2: + resolution: {integrity: sha512-u7I6qhhxH7pSevcYNaMECtkvZW365ARqAIt9K+xjdK1B2WUDEjQSfETkOCT8bxFq/59LqrN3cMLUtTgmDBaygw==} + dev: true + /js-sdsl/4.1.5: resolution: {integrity: sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q==} dev: true @@ -4635,6 +4946,10 @@ packages: engines: {node: '>=12.0.0'} dev: true + /json-parse-better-errors/1.0.2: + resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} + dev: true + /json-parse-even-better-errors/2.3.1: resolution: {integrity: 
sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} dev: true @@ -4754,6 +5069,16 @@ packages: wrap-ansi: 7.0.0 dev: true + /load-json-file/4.0.0: + resolution: {integrity: sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==} + engines: {node: '>=4'} + dependencies: + graceful-fs: 4.2.10 + parse-json: 4.0.0 + pify: 3.0.0 + strip-bom: 3.0.0 + dev: true + /load-json-file/7.0.1: resolution: {integrity: sha512-Gnxj3ev3mB5TkVBGad0JM6dmLiQL+o0t23JPBZ9sd+yvSLk05mFoqKBw5N8gbbkU4TNXyqCgIrl/VM17OgUIgQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -4819,6 +5144,9 @@ packages: wrap-ansi: 6.2.0 dev: true + /long/5.2.1: + resolution: {integrity: sha512-GKSNGeNAtw8IryjjkhZxuKB3JzlcLTwjtiQCHKvqQet81I93kXslhDQruGI/QsddO83mcDToBVy7GqGS/zYf/A==} + /loose-envify/1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true @@ -4880,6 +5208,11 @@ packages: mimic-fn: 4.0.0 dev: true + /memorystream/0.3.1: + resolution: {integrity: sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==} + engines: {node: '>= 0.10.0'} + dev: true + /merge-options/3.0.4: resolution: {integrity: sha512-2Sug1+knBjkaMsMgf1ctR1Ujx+Ayku4EdJN4Z+C2+JzoeF7A3OZ9KM2GY0CpQS51NR61LTurMJrRKPhSs3ZRTQ==} engines: {node: '>=10'} @@ -5102,8 +5435,16 @@ packages: /multiformats/10.0.2: resolution: {integrity: sha512-nJEHLFOYhO4L+aNApHhCnWqa31FyqAHv9Q77AhmwU3KsM2f1j7tuJpCk5ByZ33smzycNCpSG5klNIejIyfFx2A==} engines: {node: '>=16.0.0', npm: '>=7.0.0'} + + /multiformats/9.9.0: + resolution: {integrity: sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg==} dev: false + /murmurhash3js-revisited/3.0.0: + resolution: {integrity: sha512-/sF3ee6zvScXMb1XFJ8gDsSnY+X8PbOyjIuBhtgis10W2Jx4ZjIhikUCIF9c4gpJxVnQIsPAFrSwTCuAjicP6g==} + engines: {node: '>=8.0.0'} + dev: true + 
/mustache/4.2.0: resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} hasBin: true @@ -5191,6 +5532,10 @@ packages: - babel-plugin-macros dev: false + /nice-try/1.0.5: + resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} + dev: true + /node-abi/3.28.0: resolution: {integrity: sha512-fRlDb4I0eLcQeUvGq7IY3xHrSb0c9ummdvDSYWfT9+LKP+3jCKw/tKoqaM7r1BAoiAC6GtwyjaGnOz6B3OtF+A==} engines: {node: '>=10'} @@ -5234,6 +5579,22 @@ packages: engines: {node: '>=0.10.0'} dev: true + /npm-run-all/4.1.5: + resolution: {integrity: sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==} + engines: {node: '>= 4'} + hasBin: true + dependencies: + ansi-styles: 3.2.1 + chalk: 2.4.2 + cross-spawn: 6.0.5 + memorystream: 0.3.1 + minimatch: 3.1.2 + pidtree: 0.3.1 + read-pkg: 3.0.0 + shell-quote: 1.7.4 + string.prototype.padend: 3.1.4 + dev: true + /npm-run-path/5.1.0: resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -5348,7 +5709,6 @@ packages: /one-webcrypto/1.0.3: resolution: {integrity: sha512-fu9ywBVBPx0gS9K0etIROTiCkvI5S1TDjFsYFb3rC1ewFxeOqsbzq7aIMBHsYfrTHBcGXJaONXXjTl8B01cW1Q==} - dev: false /onetime/5.1.2: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} @@ -5402,7 +5762,6 @@ packages: /p-defer/4.0.0: resolution: {integrity: sha512-Vb3QRvQ0Y5XnF40ZUWW7JfLogicVh/EnA5gBIvKDJoYpeI82+1E3AlB9yOcKFS0AhHrWVnAQO39fbR0G99IVEQ==} engines: {node: '>=12'} - dev: false /p-event/4.2.0: resolution: {integrity: sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ==} @@ -5497,7 +5856,6 @@ packages: dependencies: eventemitter3: 4.0.7 p-timeout: 5.1.0 - dev: false /p-retry/5.1.1: resolution: 
{integrity: sha512-i69WkEU5ZAL8mrmdmVviWwU+DN+IUF8f4sSJThoJ3z5A7Nn5iuO5ROX3Boye0u+uYQLOSfgFl7SuFZCjlAVbQA==} @@ -5546,6 +5904,14 @@ packages: callsites: 3.1.0 dev: true + /parse-json/4.0.0: + resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} + engines: {node: '>=4'} + dependencies: + error-ex: 1.3.2 + json-parse-better-errors: 1.0.2 + dev: true + /parse-json/5.2.0: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} @@ -5588,6 +5954,11 @@ packages: engines: {node: '>=0.10.0'} dev: true + /path-key/2.0.1: + resolution: {integrity: sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==} + engines: {node: '>=4'} + dev: true + /path-key/3.1.1: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} @@ -5606,11 +5977,25 @@ packages: resolution: {integrity: sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==} dev: true + /path-type/3.0.0: + resolution: {integrity: sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==} + engines: {node: '>=4'} + dependencies: + pify: 3.0.0 + dev: true + /path-type/4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} dev: true + /path/0.12.7: + resolution: {integrity: sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q==} + dependencies: + process: 0.11.10 + util: 0.10.4 + dev: true + /picocolors/1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} dev: false @@ -5620,12 +6005,23 @@ packages: engines: {node: '>=8.6'} dev: true + /pidtree/0.3.1: + resolution: 
{integrity: sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==} + engines: {node: '>=0.10'} + hasBin: true + dev: true + /pidtree/0.6.0: resolution: {integrity: sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==} engines: {node: '>=0.10'} hasBin: true dev: true + /pify/3.0.0: + resolution: {integrity: sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==} + engines: {node: '>=4'} + dev: true + /pkg-conf/4.0.0: resolution: {integrity: sha512-7dmgi4UY4qk+4mj5Cd8v/GExPo0K+SlY+hulOSdfZ/T6jVH6//y7NtzZo5WrfhDBxuQ0jCa7fLZmNaNh7EWL/w==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -5797,6 +6193,24 @@ packages: react-is: 16.13.1 dev: true + /protobufjs/7.1.2: + resolution: {integrity: sha512-4ZPTPkXCdel3+L81yw3dG6+Kq3umdWKh7Dc7GW/CpNk4SX3hK58iPCWeCyhVTDrbkNeKrYNZ7EojM5WDaEWTLQ==} + engines: {node: '>=12.0.0'} + requiresBuild: true + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 18.11.9 + long: 5.2.1 + /proxy-from-env/1.1.0: resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} dev: true @@ -5827,6 +6241,10 @@ packages: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} dev: true + /rabin-rs/2.1.0: + resolution: {integrity: sha512-5y72gAXPzIBsAMHcpxZP8eMDuDT98qMP1BqSDHRbHkJJXEgWIN1lA47LxUqzsK6jknOJtgfkQr9v+7qMlFDm6g==} + dev: false + /randombytes/2.1.0: resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} dependencies: @@ -5873,6 +6291,15 @@ packages: 
type-fest: 0.8.1 dev: true + /read-pkg/3.0.0: + resolution: {integrity: sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==} + engines: {node: '>=4'} + dependencies: + load-json-file: 4.0.0 + normalize-package-data: 2.5.0 + path-type: 3.0.0 + dev: true + /read-pkg/5.2.0: resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==} engines: {node: '>=8'} @@ -6169,6 +6596,13 @@ packages: resolution: {integrity: sha512-1jeBGaKNGdEq4FgIrORu/N570dwoPYio8lSoYLWmX7sQ//0JY08Xh9o5pBcgmHQ/MbsYp/aZnOe1s1lIsbLprQ==} dev: true + /shebang-command/1.2.0: + resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==} + engines: {node: '>=0.10.0'} + dependencies: + shebang-regex: 1.0.0 + dev: true + /shebang-command/2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} @@ -6176,11 +6610,20 @@ packages: shebang-regex: 3.0.0 dev: true + /shebang-regex/1.0.0: + resolution: {integrity: sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==} + engines: {node: '>=0.10.0'} + dev: true + /shebang-regex/3.0.0: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} dev: true + /shell-quote/1.7.4: + resolution: {integrity: sha512-8o/QEhSSRb1a5i7TFR0iM4G16Z0vYB2OQVs4G3aAFXjn3T6yEx8AZxy1PgDF7I00LZHYA3WxaSYIf5e5sAX8Rw==} + dev: true + /shelljs/0.8.5: resolution: {integrity: sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==} engines: {node: '>=4'} @@ -6296,6 +6739,10 @@ packages: resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} dev: true + /sparse-array/1.3.2: + resolution: {integrity: 
sha512-ZT711fePGn3+kQyLuv1fpd3rNSkNF8vd5Kv2D+qnOANeyKs3fx6bUMGWRPvgTTcYV64QMqZKZwcuaQSP3AZ0tg==} + dev: true + /spdx-correct/3.1.1: resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==} dependencies: @@ -6412,6 +6859,15 @@ packages: side-channel: 1.0.4 dev: true + /string.prototype.padend/3.1.4: + resolution: {integrity: sha512-67otBXoksdjsnXXRUq+KMVTdlVRZ2af422Y0aTyTjVaoQkGr3mxl2Bc5emi7dOQ3OGVVQQskmLEWwFXwommpNw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + dev: true + /string.prototype.trim/1.2.6: resolution: {integrity: sha512-8lMR2m+U0VJTPp6JjvJTtGyc4FIGq9CdRt7O9p6T0e6K4vjU+OP+SQJpbe/SBmRcCUIvNUnjsbmY6lnMp8MhsQ==} engines: {node: '>= 0.4'} @@ -6773,7 +7229,6 @@ packages: engines: {node: '>=16.0.0', npm: '>=7.0.0'} dependencies: multiformats: 10.0.2 - dev: false /unbox-primitive/1.0.2: resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} @@ -6823,6 +7278,12 @@ packages: /util-deprecate/1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + /util/0.10.4: + resolution: {integrity: sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==} + dependencies: + inherits: 2.0.3 + dev: true + /util/0.12.5: resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==} dependencies: @@ -6857,7 +7318,6 @@ packages: /varint/6.0.0: resolution: {integrity: sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg==} - dev: false /watch/1.0.2: resolution: {integrity: sha512-1u+Z5n9Jc1E2c7qDO8SinPoZuHj7FgbgU1olSFoyaklduDvvtX7GMMtlE6OC9FTXq4KvNAOfj6Zu4vI1e9bAKA==} @@ -6941,6 +7401,13 @@ packages: has-tostringtag: 1.0.0 is-typed-array: 1.1.10 + /which/1.3.1: + resolution: 
{integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + /which/2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} @@ -7082,6 +7549,11 @@ packages: engines: {node: '>=10'} dev: true + /yargs-parser/20.2.9: + resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} + engines: {node: '>=10'} + dev: true + /yargs-parser/21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'}