diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644
index d1e7958..0000000
--- a/.dockerignore
+++ /dev/null
@@ -1,2 +0,0 @@
-**
-!/dist
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 9dc4acb..0000000
--- a/Dockerfile
+++ /dev/null
@@ -1,5 +0,0 @@
-# Just enough docker until github gets a new node16 runner
-# see: https://github.com/actions/runner/issues/772
-FROM node:16-alpine
-COPY dist dist
-ENTRYPOINT [ "node", "/dist/index.js" ]
diff --git a/README.md b/README.md
index fa41bbf..82d3c8b 100644
--- a/README.md
+++ b/README.md
@@ -64,8 +64,6 @@ e.g. `https://dweb.link/ipfs/bafkreicysg23kiwv34eg2d7qweipxwosdo2py4ldv42nbaugul
💌 Considerate contributions welcome!
-*Of note* This is supposed to be a Javascript flavour GitHub Action, but the JS runner is [stuck on node12](https://github.com/actions/runner/issues/772v), and we need at least node14. Until the glorious future where the current node version is supported, we wrap the action in a container.
-
-The `dist` folder is commited to the repo as is the curious cultural norm with JS actions, as the repo is the delivery mechanism, so to spare some cycles for the user users, all the deps are bundled into a single /dist/index.js monolith. This no longer makes much sense as we're also wrapping it in a container, but the dream is that the new node16 runner lands, and we can just delete the Dockers and move on.
+The `dist` folder is committed to the repo, as is the curious cultural norm with JS actions: the repo is the delivery mechanism, so to spare some cycles for the user, all the deps are bundled into a single /dist/index.js monolith.
diff --git a/action.yml b/action.yml
index be62873..9f07f63 100644
--- a/action.yml
+++ b/action.yml
@@ -23,9 +23,5 @@ outputs:
cid:
description: 'The IPFS Content ID for the directory'
runs:
- # TODO: we need node14.14 minimum.
- # https://github.com/actions/runner/issues/772
- # using: 'node12'
- # main: 'dist/index.js'
- using: 'docker'
- image: 'Dockerfile'
+ using: 'node16'
+ main: 'dist/index.js'
diff --git a/dist/index.js b/dist/index.js
index 3756717..803bfd9 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -134,12 +134,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
+exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
const command_1 = __nccwpck_require__(7351);
const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2087));
const path = __importStar(__nccwpck_require__(5622));
+const oidc_utils_1 = __nccwpck_require__(8041);
/**
* The code to exit an action
*/
@@ -312,19 +313,30 @@ exports.debug = debug;
/**
* Adds an error issue
* @param message error issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
*/
-function error(message) {
- command_1.issue('error', message instanceof Error ? message.toString() : message);
+function error(message, properties = {}) {
+ command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
- * Adds an warning issue
+ * Adds a warning issue
* @param message warning issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
*/
-function warning(message) {
- command_1.issue('warning', message instanceof Error ? message.toString() : message);
+function warning(message, properties = {}) {
+ command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
+/**
+ * Adds a notice issue
+ * @param message notice issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function notice(message, properties = {}) {
+ command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+exports.notice = notice;
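// A hedged usage sketch of the annotation-properties API added above (the
// message and property values are hypothetical; title/file/startLine map onto
// the runner's annotation fields via toCommandProperties further down):
//
//   const core = require('@actions/core')
//   core.warning('could not parse CID', {
//     title: 'Invalid CID',
//     file: 'action.yml',
//     startLine: 24,
//     endLine: 24
//   })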
/**
* Writes info to log with console.log.
* @param message info message
@@ -397,6 +409,12 @@ function getState(name) {
return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
+function getIDToken(aud) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield oidc_utils_1.OidcClient.getIDToken(aud);
+ });
+}
+exports.getIDToken = getIDToken;
//# sourceMappingURL=core.js.map
/***/ }),
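// A hedged sketch of consuming the new OIDC support from an action (the
// audience is hypothetical; the workflow job needs `permissions: id-token: write`
// so the ACTIONS_ID_TOKEN_REQUEST_* env vars checked by OidcClient below exist):
//
//   const core = require('@actions/core')
//
//   async function run () {
//     const idToken = await core.getIDToken('sts.example.com')
//     // getIDToken already calls setSecret() on the token before returning it
//   }
//   run().catch(err => core.setFailed(err.message))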
@@ -450,6 +468,90 @@ exports.issueCommand = issueCommand;
/***/ }),
+/***/ 8041:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.OidcClient = void 0;
+const http_client_1 = __nccwpck_require__(9925);
+const auth_1 = __nccwpck_require__(3702);
+const core_1 = __nccwpck_require__(2186);
+class OidcClient {
+ static createHttpClient(allowRetry = true, maxRetry = 10) {
+ const requestOptions = {
+ allowRetries: allowRetry,
+ maxRetries: maxRetry
+ };
+ return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
+ }
+ static getRequestToken() {
+ const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
+ if (!token) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
+ }
+ return token;
+ }
+ static getIDTokenUrl() {
+ const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
+ if (!runtimeUrl) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
+ }
+ return runtimeUrl;
+ }
+ static getCall(id_token_url) {
+ var _a;
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpclient = OidcClient.createHttpClient();
+ const res = yield httpclient
+ .getJson(id_token_url)
+ .catch(error => {
+ throw new Error(`Failed to get ID Token. \n
+ Error Code : ${error.statusCode}\n
+ Error Message: ${error.result.message}`);
+ });
+ const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
+ if (!id_token) {
+                throw new Error('Response json body does not have ID Token field');
+ }
+ return id_token;
+ });
+ }
+ static getIDToken(audience) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ // New ID Token is requested from action service
+ let id_token_url = OidcClient.getIDTokenUrl();
+ if (audience) {
+ const encodedAudience = encodeURIComponent(audience);
+ id_token_url = `${id_token_url}&audience=${encodedAudience}`;
+ }
+ core_1.debug(`ID token url is ${id_token_url}`);
+ const id_token = yield OidcClient.getCall(id_token_url);
+ core_1.setSecret(id_token);
+ return id_token;
+ }
+ catch (error) {
+ throw new Error(`Error message: ${error.message}`);
+ }
+ });
+ }
+}
+exports.OidcClient = OidcClient;
+//# sourceMappingURL=oidc-utils.js.map
+
+/***/ }),
+
/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {
@@ -458,7 +560,7 @@ exports.issueCommand = issueCommand;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.toCommandValue = void 0;
+exports.toCommandProperties = exports.toCommandValue = void 0;
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
@@ -473,8 +575,704 @@ function toCommandValue(input) {
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
+/**
+ *
+ * @param annotationProperties
+ * @returns The command properties to send with the actual annotation command
+ * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
+ */
+function toCommandProperties(annotationProperties) {
+ if (!Object.keys(annotationProperties).length) {
+ return {};
+ }
+ return {
+ title: annotationProperties.title,
+ file: annotationProperties.file,
+ line: annotationProperties.startLine,
+ endLine: annotationProperties.endLine,
+ col: annotationProperties.startColumn,
+ endColumn: annotationProperties.endColumn
+ };
+}
+exports.toCommandProperties = toCommandProperties;
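// For illustration, how annotation properties map onto the runner's command
// properties (startLine/startColumn are renamed to line/col):
//
//   toCommandProperties({ title: 'Invalid CID', startLine: 24, endLine: 24 })
//   // => { title: 'Invalid CID', file: undefined, line: 24,
//   //      endLine: 24, col: undefined, endColumn: undefined }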
//# sourceMappingURL=utils.js.map
+/***/ }),
+
+/***/ 3702:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+class BasicCredentialHandler {
+ constructor(username, password) {
+ this.username = username;
+ this.password = password;
+ }
+ prepareRequest(options) {
+ options.headers['Authorization'] =
+ 'Basic ' +
+ Buffer.from(this.username + ':' + this.password).toString('base64');
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication(response) {
+ return false;
+ }
+ handleAuthentication(httpClient, requestInfo, objs) {
+ return null;
+ }
+}
+exports.BasicCredentialHandler = BasicCredentialHandler;
+class BearerCredentialHandler {
+ constructor(token) {
+ this.token = token;
+ }
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ options.headers['Authorization'] = 'Bearer ' + this.token;
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication(response) {
+ return false;
+ }
+ handleAuthentication(httpClient, requestInfo, objs) {
+ return null;
+ }
+}
+exports.BearerCredentialHandler = BearerCredentialHandler;
+class PersonalAccessTokenCredentialHandler {
+ constructor(token) {
+ this.token = token;
+ }
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ options.headers['Authorization'] =
+ 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication(response) {
+ return false;
+ }
+ handleAuthentication(httpClient, requestInfo, objs) {
+ return null;
+ }
+}
+exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
+
+
+/***/ }),
+
+/***/ 9925:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const http = __nccwpck_require__(8605);
+const https = __nccwpck_require__(7211);
+const pm = __nccwpck_require__(6443);
+let tunnel;
+var HttpCodes;
+(function (HttpCodes) {
+ HttpCodes[HttpCodes["OK"] = 200] = "OK";
+ HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
+ HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
+ HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
+ HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
+ HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
+ HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
+ HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
+ HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
+ HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
+ HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
+ HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
+ HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
+ HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
+ HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
+ HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
+ HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
+ HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
+ HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
+ HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
+ HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+ HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
+ HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
+ HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
+ HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
+ HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
+ HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
+})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+var Headers;
+(function (Headers) {
+ Headers["Accept"] = "accept";
+ Headers["ContentType"] = "content-type";
+})(Headers = exports.Headers || (exports.Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+ MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
+/**
+ * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+function getProxyUrl(serverUrl) {
+ let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
+ return proxyUrl ? proxyUrl.href : '';
+}
+exports.getProxyUrl = getProxyUrl;
+const HttpRedirectCodes = [
+ HttpCodes.MovedPermanently,
+ HttpCodes.ResourceMoved,
+ HttpCodes.SeeOther,
+ HttpCodes.TemporaryRedirect,
+ HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+ HttpCodes.BadGateway,
+ HttpCodes.ServiceUnavailable,
+ HttpCodes.GatewayTimeout
+];
+const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
+const ExponentialBackoffCeiling = 10;
+const ExponentialBackoffTimeSlice = 5;
+class HttpClientError extends Error {
+ constructor(message, statusCode) {
+ super(message);
+ this.name = 'HttpClientError';
+ this.statusCode = statusCode;
+ Object.setPrototypeOf(this, HttpClientError.prototype);
+ }
+}
+exports.HttpClientError = HttpClientError;
+class HttpClientResponse {
+ constructor(message) {
+ this.message = message;
+ }
+ readBody() {
+ return new Promise(async (resolve, reject) => {
+ let output = Buffer.alloc(0);
+ this.message.on('data', (chunk) => {
+ output = Buffer.concat([output, chunk]);
+ });
+ this.message.on('end', () => {
+ resolve(output.toString());
+ });
+ });
+ }
+}
+exports.HttpClientResponse = HttpClientResponse;
+function isHttps(requestUrl) {
+ let parsedUrl = new URL(requestUrl);
+ return parsedUrl.protocol === 'https:';
+}
+exports.isHttps = isHttps;
+class HttpClient {
+ constructor(userAgent, handlers, requestOptions) {
+ this._ignoreSslError = false;
+ this._allowRedirects = true;
+ this._allowRedirectDowngrade = false;
+ this._maxRedirects = 50;
+ this._allowRetries = false;
+ this._maxRetries = 1;
+ this._keepAlive = false;
+ this._disposed = false;
+ this.userAgent = userAgent;
+ this.handlers = handlers || [];
+ this.requestOptions = requestOptions;
+ if (requestOptions) {
+ if (requestOptions.ignoreSslError != null) {
+ this._ignoreSslError = requestOptions.ignoreSslError;
+ }
+ this._socketTimeout = requestOptions.socketTimeout;
+ if (requestOptions.allowRedirects != null) {
+ this._allowRedirects = requestOptions.allowRedirects;
+ }
+ if (requestOptions.allowRedirectDowngrade != null) {
+ this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+ }
+ if (requestOptions.maxRedirects != null) {
+ this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+ }
+ if (requestOptions.keepAlive != null) {
+ this._keepAlive = requestOptions.keepAlive;
+ }
+ if (requestOptions.allowRetries != null) {
+ this._allowRetries = requestOptions.allowRetries;
+ }
+ if (requestOptions.maxRetries != null) {
+ this._maxRetries = requestOptions.maxRetries;
+ }
+ }
+ }
+ options(requestUrl, additionalHeaders) {
+ return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+ }
+ get(requestUrl, additionalHeaders) {
+ return this.request('GET', requestUrl, null, additionalHeaders || {});
+ }
+ del(requestUrl, additionalHeaders) {
+ return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+ }
+ post(requestUrl, data, additionalHeaders) {
+ return this.request('POST', requestUrl, data, additionalHeaders || {});
+ }
+ patch(requestUrl, data, additionalHeaders) {
+ return this.request('PATCH', requestUrl, data, additionalHeaders || {});
+ }
+ put(requestUrl, data, additionalHeaders) {
+ return this.request('PUT', requestUrl, data, additionalHeaders || {});
+ }
+ head(requestUrl, additionalHeaders) {
+ return this.request('HEAD', requestUrl, null, additionalHeaders || {});
+ }
+ sendStream(verb, requestUrl, stream, additionalHeaders) {
+ return this.request(verb, requestUrl, stream, additionalHeaders);
+ }
+ /**
+ * Gets a typed object from an endpoint
+ * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
+ */
+ async getJson(requestUrl, additionalHeaders = {}) {
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ let res = await this.get(requestUrl, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async postJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.post(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async putJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.put(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ async patchJson(requestUrl, obj, additionalHeaders = {}) {
+ let data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ let res = await this.patch(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ }
+ /**
+ * Makes a raw http request.
+ * All other methods such as get, post, patch, and request ultimately call this.
+ * Prefer get, del, post and patch
+ */
+ async request(verb, requestUrl, data, headers) {
+ if (this._disposed) {
+ throw new Error('Client has already been disposed.');
+ }
+ let parsedUrl = new URL(requestUrl);
+ let info = this._prepareRequest(verb, parsedUrl, headers);
+ // Only perform retries on reads since writes may not be idempotent.
+ let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
+ ? this._maxRetries + 1
+ : 1;
+ let numTries = 0;
+ let response;
+ while (numTries < maxTries) {
+ response = await this.requestRaw(info, data);
+ // Check if it's an authentication challenge
+ if (response &&
+ response.message &&
+ response.message.statusCode === HttpCodes.Unauthorized) {
+ let authenticationHandler;
+ for (let i = 0; i < this.handlers.length; i++) {
+ if (this.handlers[i].canHandleAuthentication(response)) {
+ authenticationHandler = this.handlers[i];
+ break;
+ }
+ }
+ if (authenticationHandler) {
+ return authenticationHandler.handleAuthentication(this, info, data);
+ }
+ else {
+ // We have received an unauthorized response but have no handlers to handle it.
+ // Let the response return to the caller.
+ return response;
+ }
+ }
+ let redirectsRemaining = this._maxRedirects;
+ while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
+ this._allowRedirects &&
+ redirectsRemaining > 0) {
+ const redirectUrl = response.message.headers['location'];
+ if (!redirectUrl) {
+ // if there's no location to redirect to, we won't
+ break;
+ }
+ let parsedRedirectUrl = new URL(redirectUrl);
+ if (parsedUrl.protocol == 'https:' &&
+ parsedUrl.protocol != parsedRedirectUrl.protocol &&
+ !this._allowRedirectDowngrade) {
+ throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+ }
+ // we need to finish reading the response before reassigning response
+ // which will leak the open socket.
+ await response.readBody();
+ // strip authorization header if redirected to a different hostname
+ if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+ for (let header in headers) {
+ // header names are case insensitive
+ if (header.toLowerCase() === 'authorization') {
+ delete headers[header];
+ }
+ }
+ }
+ // let's make the request with the new redirectUrl
+ info = this._prepareRequest(verb, parsedRedirectUrl, headers);
+ response = await this.requestRaw(info, data);
+ redirectsRemaining--;
+ }
+ if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
+ // If not a retry code, return immediately instead of retrying
+ return response;
+ }
+ numTries += 1;
+ if (numTries < maxTries) {
+ await response.readBody();
+ await this._performExponentialBackoff(numTries);
+ }
+ }
+ return response;
+ }
+ /**
+ * Needs to be called if keepAlive is set to true in request options.
+ */
+ dispose() {
+ if (this._agent) {
+ this._agent.destroy();
+ }
+ this._disposed = true;
+ }
+ /**
+ * Raw request.
+ * @param info
+ * @param data
+ */
+ requestRaw(info, data) {
+ return new Promise((resolve, reject) => {
+ let callbackForResult = function (err, res) {
+ if (err) {
+ reject(err);
+ }
+ resolve(res);
+ };
+ this.requestRawWithCallback(info, data, callbackForResult);
+ });
+ }
+ /**
+ * Raw request with callback.
+ * @param info
+ * @param data
+ * @param onResult
+ */
+ requestRawWithCallback(info, data, onResult) {
+ let socket;
+ if (typeof data === 'string') {
+ info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+ }
+ let callbackCalled = false;
+ let handleResult = (err, res) => {
+ if (!callbackCalled) {
+ callbackCalled = true;
+ onResult(err, res);
+ }
+ };
+ let req = info.httpModule.request(info.options, (msg) => {
+ let res = new HttpClientResponse(msg);
+ handleResult(null, res);
+ });
+ req.on('socket', sock => {
+ socket = sock;
+ });
+ // If we ever get disconnected, we want the socket to timeout eventually
+ req.setTimeout(this._socketTimeout || 3 * 60000, () => {
+ if (socket) {
+ socket.end();
+ }
+ handleResult(new Error('Request timeout: ' + info.options.path), null);
+ });
+ req.on('error', function (err) {
+ // err has statusCode property
+ // res should have headers
+ handleResult(err, null);
+ });
+ if (data && typeof data === 'string') {
+ req.write(data, 'utf8');
+ }
+ if (data && typeof data !== 'string') {
+ data.on('close', function () {
+ req.end();
+ });
+ data.pipe(req);
+ }
+ else {
+ req.end();
+ }
+ }
+ /**
+ * Gets an http agent. This function is useful when you need an http agent that handles
+ * routing through a proxy server - depending upon the url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+ getAgent(serverUrl) {
+ let parsedUrl = new URL(serverUrl);
+ return this._getAgent(parsedUrl);
+ }
+ _prepareRequest(method, requestUrl, headers) {
+ const info = {};
+ info.parsedUrl = requestUrl;
+ const usingSsl = info.parsedUrl.protocol === 'https:';
+ info.httpModule = usingSsl ? https : http;
+ const defaultPort = usingSsl ? 443 : 80;
+ info.options = {};
+ info.options.host = info.parsedUrl.hostname;
+ info.options.port = info.parsedUrl.port
+ ? parseInt(info.parsedUrl.port)
+ : defaultPort;
+ info.options.path =
+ (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+ info.options.method = method;
+ info.options.headers = this._mergeHeaders(headers);
+ if (this.userAgent != null) {
+ info.options.headers['user-agent'] = this.userAgent;
+ }
+ info.options.agent = this._getAgent(info.parsedUrl);
+ // gives handlers an opportunity to participate
+ if (this.handlers) {
+ this.handlers.forEach(handler => {
+ handler.prepareRequest(info.options);
+ });
+ }
+ return info;
+ }
+ _mergeHeaders(headers) {
+ const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+ if (this.requestOptions && this.requestOptions.headers) {
+ return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
+ }
+ return lowercaseKeys(headers || {});
+ }
+ _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+ const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+ let clientHeader;
+ if (this.requestOptions && this.requestOptions.headers) {
+ clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
+ }
+ return additionalHeaders[header] || clientHeader || _default;
+ }
+ _getAgent(parsedUrl) {
+ let agent;
+ let proxyUrl = pm.getProxyUrl(parsedUrl);
+ let useProxy = proxyUrl && proxyUrl.hostname;
+ if (this._keepAlive && useProxy) {
+ agent = this._proxyAgent;
+ }
+ if (this._keepAlive && !useProxy) {
+ agent = this._agent;
+ }
+ // if agent is already assigned use that agent.
+ if (!!agent) {
+ return agent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ let maxSockets = 100;
+ if (!!this.requestOptions) {
+ maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
+ }
+ if (useProxy) {
+ // If using proxy, need tunnel
+ if (!tunnel) {
+ tunnel = __nccwpck_require__(4294);
+ }
+ const agentOptions = {
+ maxSockets: maxSockets,
+ keepAlive: this._keepAlive,
+ proxy: {
+ ...((proxyUrl.username || proxyUrl.password) && {
+ proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
+ }),
+ host: proxyUrl.hostname,
+ port: proxyUrl.port
+ }
+ };
+ let tunnelAgent;
+ const overHttps = proxyUrl.protocol === 'https:';
+ if (usingSsl) {
+ tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
+ }
+ else {
+ tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
+ }
+ agent = tunnelAgent(agentOptions);
+ this._proxyAgent = agent;
+ }
+ // if reusing agent across request and tunneling agent isn't assigned create a new agent
+ if (this._keepAlive && !agent) {
+ const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
+ agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
+ this._agent = agent;
+ }
+ // if not using private agent and tunnel agent isn't setup then use global agent
+ if (!agent) {
+ agent = usingSsl ? https.globalAgent : http.globalAgent;
+ }
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ agent.options = Object.assign(agent.options || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return agent;
+ }
+ _performExponentialBackoff(retryNumber) {
+ retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
+ const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
+ return new Promise(resolve => setTimeout(() => resolve(), ms));
+ }
+ static dateTimeDeserializer(key, value) {
+ if (typeof value === 'string') {
+ let a = new Date(value);
+ if (!isNaN(a.valueOf())) {
+ return a;
+ }
+ }
+ return value;
+ }
+ async _processResponse(res, options) {
+ return new Promise(async (resolve, reject) => {
+ const statusCode = res.message.statusCode;
+ const response = {
+ statusCode: statusCode,
+ result: null,
+ headers: {}
+ };
+ // not found leads to null obj returned
+ if (statusCode == HttpCodes.NotFound) {
+ resolve(response);
+ }
+ let obj;
+ let contents;
+ // get the result from the body
+ try {
+ contents = await res.readBody();
+ if (contents && contents.length > 0) {
+ if (options && options.deserializeDates) {
+ obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
+ }
+ else {
+ obj = JSON.parse(contents);
+ }
+ response.result = obj;
+ }
+ response.headers = res.message.headers;
+ }
+ catch (err) {
+ // Invalid resource (contents not json); leaving result obj null
+ }
+ // note that 3xx redirects are handled by the http layer.
+ if (statusCode > 299) {
+ let msg;
+ // if exception/error in body, attempt to get better error
+ if (obj && obj.message) {
+ msg = obj.message;
+ }
+ else if (contents && contents.length > 0) {
+ // it may be the case that the exception is in the body message as string
+ msg = contents;
+ }
+ else {
+ msg = 'Failed request: (' + statusCode + ')';
+ }
+ let err = new HttpClientError(msg, statusCode);
+ err.result = response.result;
+ reject(err);
+ }
+ else {
+ resolve(response);
+ }
+ });
+ }
+}
+exports.HttpClient = HttpClient;
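// A hedged usage sketch of this client, mirroring OidcClient.createHttpClient
// earlier in this bundle (the URL is hypothetical; getJson resolves a 404 with
// result: null and rejects other 4xx/5xx with HttpClientError):
//
//   const client = new HttpClient('my-agent', [], { allowRetries: true, maxRetries: 3 })
//   const res = await client.getJson('https://example.com/api/status.json')
//   console.log(res.statusCode, res.result)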
+
+
+/***/ }),
+
+/***/ 6443:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+function getProxyUrl(reqUrl) {
+ let usingSsl = reqUrl.protocol === 'https:';
+ let proxyUrl;
+ if (checkBypass(reqUrl)) {
+ return proxyUrl;
+ }
+ let proxyVar;
+ if (usingSsl) {
+ proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
+ }
+ else {
+ proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
+ }
+ if (proxyVar) {
+ proxyUrl = new URL(proxyVar);
+ }
+ return proxyUrl;
+}
+exports.getProxyUrl = getProxyUrl;
+function checkBypass(reqUrl) {
+ if (!reqUrl.hostname) {
+ return false;
+ }
+ let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
+ if (!noProxy) {
+ return false;
+ }
+ // Determine the request port
+ let reqPort;
+ if (reqUrl.port) {
+ reqPort = Number(reqUrl.port);
+ }
+ else if (reqUrl.protocol === 'http:') {
+ reqPort = 80;
+ }
+ else if (reqUrl.protocol === 'https:') {
+ reqPort = 443;
+ }
+ // Format the request hostname and hostname with port
+ let upperReqHosts = [reqUrl.hostname.toUpperCase()];
+ if (typeof reqPort === 'number') {
+ upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
+ }
+ // Compare request host against noproxy
+ for (let upperNoProxyItem of noProxy
+ .split(',')
+ .map(x => x.trim().toUpperCase())
+ .filter(x => x)) {
+ if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+ return true;
+ }
+ }
+ return false;
+}
+exports.checkBypass = checkBypass;
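// For illustration — the proxy selection and NO_PROXY bypass implemented above
// (hostnames are hypothetical):
//
//   process.env.HTTPS_PROXY = 'http://proxy.example:8080'
//   process.env.NO_PROXY = 'internal.example'
//   getProxyUrl(new URL('https://api.github.com'))     // => the proxy URL
//   getProxyUrl(new URL('https://internal.example/x')) // => undefined (bypassed)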
+
+
/***/ }),
/***/ 1748:
@@ -2749,6 +3547,47 @@ exports.prepare = prepare;
exports.validate = validate;
+/***/ }),
+
+/***/ 6063:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+var hasher = __nccwpck_require__(92);
+var multiformats = __nccwpck_require__(5978);
+var mur = __nccwpck_require__(7214);
+
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+
+var mur__default = /*#__PURE__*/_interopDefaultLegacy(mur);
+
+function fromNumberTo32BitBuf(number) {
+ const bytes = new Array(4);
+ for (let i = 0; i < 4; i++) {
+ bytes[i] = number & 255;
+ number = number >> 8;
+ }
+ return new Uint8Array(bytes);
+}
+const murmur332 = hasher.from({
+ name: 'murmur3-32',
+ code: 35,
+ encode: input => fromNumberTo32BitBuf(mur__default["default"].x86.hash32(input))
+});
+const murmur3128 = hasher.from({
+ name: 'murmur3-128',
+ code: 34,
+ encode: input => multiformats.bytes.fromHex(mur__default["default"].x64.hash128(input))
+});
+
+exports.murmur3128 = murmur3128;
+exports.murmur332 = murmur332;
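// A hedged usage sketch of the hashers registered above; they implement the
// multiformats Hasher interface, so they can also be passed as the new
// `hasher` option to pack() later in this bundle:
//
//   const digest = await murmur3128.digest(new Uint8Array([1, 2, 3]))
//   // digest.code === 34 (murmur3-128), digest.digest.length === 16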
+
+
/***/ }),
/***/ 252:
@@ -3665,6 +4504,7 @@ const browserReadableStreamToIt = __nccwpck_require__(664)
*/
function blobToIt (blob) {
if (typeof blob.stream === 'function') {
+ // @ts-ignore missing some properties
return browserReadableStreamToIt(blob.stream())
}
@@ -4688,6 +5528,8 @@ const MINOR_UNDEFINED = 23;
function decodeUndefined(_data, _pos, _minor, options) {
if (options.allowUndefined === false) {
throw new Error(`${ common.decodeErrPrefix } undefined values are not supported`);
+ } else if (options.coerceUndefinedToNull === true) {
+ return new token.Token(token.Type.null, null, 1);
}
return new token.Token(token.Type.undefined, undefined, 1);
}
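// A hedged sketch of the new decode option (bytes is hypothetical CBOR data
// containing an `undefined` value):
//
//   decode(bytes, { coerceUndefinedToNull: true })  // undefined decodes as null
//   decode(bytes, { allowUndefined: false })        // throws instead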
@@ -6126,12 +6968,30 @@ var errCode = __nccwpck_require__(2997);
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-var Path__default = /*#__PURE__*/_interopDefaultLegacy(Path);
-var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
-var glob__default = /*#__PURE__*/_interopDefaultLegacy(glob);
-var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
-
-async function getFilesFromPath(paths, options) {
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () { return e[k]; }
+ });
+ }
+ });
+ }
+ n["default"] = e;
+ return Object.freeze(n);
+}
+
+var Path__default = /*#__PURE__*/_interopDefaultLegacy(Path);
+var fs__namespace = /*#__PURE__*/_interopNamespace(fs);
+var glob__default = /*#__PURE__*/_interopDefaultLegacy(glob);
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+
+async function getFilesFromPath(paths, options) {
const files = [];
for await (const file of filesFromPath(paths, options)) {
files.push(file);
@@ -6153,11 +7013,11 @@ async function* filesFromPath(paths, options) {
};
for await (const path of paths) {
if (typeof path !== 'string') {
- throw errCode__default['default'](new Error('Path must be a string'), 'ERR_INVALID_PATH', { path });
+ throw errCode__default["default"](new Error('Path must be a string'), 'ERR_INVALID_PATH', { path });
}
- const absolutePath = Path__default['default'].resolve(process.cwd(), path);
- const stat = await fs.promises.stat(absolutePath);
- const prefix = Path__default['default'].dirname(absolutePath);
+ const absolutePath = Path__default["default"].resolve(process.cwd(), path);
+ const stat = await fs__namespace.promises.stat(absolutePath);
+ const prefix = Path__default["default"].dirname(absolutePath);
let mode = options.mode;
if (options.preserveMode) {
mode = stat.mode;
@@ -6180,11 +7040,11 @@ async function* filesFromPath(paths, options) {
}
async function* toGlobSource({path, type, prefix, mode, mtime, size, preserveMode, preserveMtime}, options) {
options = options || {};
- const baseName = Path__default['default'].basename(path);
+ const baseName = Path__default["default"].basename(path);
if (type === 'file') {
yield {
name: `/${ baseName.replace(prefix, '') }`,
- stream: () => fs__default['default'].createReadStream(Path__default['default'].isAbsolute(path) ? path : Path__default['default'].join(process.cwd(), path)),
+ stream: () => fs__namespace.createReadStream(Path__default["default"].isAbsolute(path) ? path : Path__default["default"].join(process.cwd(), path)),
mode,
mtime,
size
@@ -6197,8 +7057,8 @@ async function* toGlobSource({path, type, prefix, mode, mtime, size, preserveMod
realpath: false,
absolute: true
});
- for await (const p of glob__default['default'](path, '**/*', globOptions)) {
- const stat = await fs.promises.stat(p);
+ for await (const p of glob__default["default"](path, '**/*', globOptions)) {
+ const stat = await fs__namespace.promises.stat(p);
if (!stat.isFile()) {
continue;
}
@@ -6212,7 +7072,7 @@ async function* toGlobSource({path, type, prefix, mode, mtime, size, preserveMod
}
yield {
name: toPosix(p.replace(prefix, '')),
- stream: () => fs__default['default'].createReadStream(p),
+ stream: () => fs__namespace.createReadStream(p),
mode,
mtime,
size: stat.size
@@ -6235,7 +7095,7 @@ exports.getFilesFromPath = getFilesFromPath;
// @ts-ignore
const SparseArray = __nccwpck_require__(1128)
-const uint8ArrayFromString = __nccwpck_require__(828)
+const { fromString: uint8ArrayFromString } = __nccwpck_require__(3538)
/**
* @typedef {import('./consumable-hash').InfiniteHash} InfiniteHash
@@ -6721,7 +7581,7 @@ function maskFor (start, length) {
const ConsumableBuffer = __nccwpck_require__(6514)
-const uint8ArrayConcat = __nccwpck_require__(7952)
+const { concat: uint8ArrayConcat } = __nccwpck_require__(5114)
/**
* @param {(value: Uint8Array) => Promise} hashFn
@@ -7307,6 +8167,20 @@ class FsBlockStore extends interface_blockstore_1.BlockstoreAdapter {
const bytes = await fs_1.default.promises.readFile(location);
return bytes;
}
+ async has(cid) {
+ if (!this._opened) {
+ await this._open();
+ }
+ const cidStr = cid.toString();
+ const location = `${this.path}/${cidStr}`;
+ try {
+ await fs_1.default.promises.access(location);
+ return true;
+ }
+ catch (err) {
+ return false;
+ }
+ }
async *blocks() {
if (!this._opened) {
await this._open();
@@ -7360,6 +8234,9 @@ class MemoryBlockStore extends interface_blockstore_1.BlockstoreAdapter {
}
return Promise.resolve(bytes);
}
+ has(cid) {
+ return Promise.resolve(this.store.has(cid.toString()));
+ }
close() {
this.store.clear();
return Promise.resolve();
@@ -7384,7 +8261,8 @@ exports.unixfsImporterOptionsDefault = {
maxChunkSize: 262144,
hasher: sha2_1.sha256,
rawLeaves: true,
- wrapWithDirectory: true
+ wrapWithDirectory: true,
+ maxChildrenPerNode: 174
};
@@ -7403,33 +8281,60 @@ exports.pack = void 0;
const it_last_1 = __importDefault(__nccwpck_require__(7123));
const it_pipe_1 = __importDefault(__nccwpck_require__(7185));
const car_1 = __nccwpck_require__(2805);
-const ipfs_unixfs_importer_1 = __nccwpck_require__(1333);
-const index_js_1 = __importDefault(__nccwpck_require__(4369));
+const ipfs_unixfs_importer_1 = __nccwpck_require__(1626);
+// @ts-ignore
+const index_js_1 = __nccwpck_require__(4369);
const memory_1 = __nccwpck_require__(7913);
const constants_1 = __nccwpck_require__(1563);
-async function pack({ input, blockstore: userBlockstore, maxChunkSize, wrapWithDirectory }) {
+async function pack({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory }) {
if (!input || (Array.isArray(input) && !input.length)) {
throw new Error('missing input file(s)');
}
const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore();
// Consume the source
- const rootEntry = await it_last_1.default(it_pipe_1.default(index_js_1.default(input), (source) => ipfs_unixfs_importer_1.importer(source, blockstore, {
+ const rootEntry = await (0, it_last_1.default)((0, it_pipe_1.default)((0, index_js_1.normaliseInput)(input), (source) => (0, ipfs_unixfs_importer_1.importer)(source, blockstore, {
...constants_1.unixfsImporterOptionsDefault,
+ hasher: hasher || constants_1.unixfsImporterOptionsDefault.hasher,
maxChunkSize: maxChunkSize || constants_1.unixfsImporterOptionsDefault.maxChunkSize,
+ maxChildrenPerNode: maxChildrenPerNode || constants_1.unixfsImporterOptionsDefault.maxChildrenPerNode,
wrapWithDirectory: wrapWithDirectory === false ? false : constants_1.unixfsImporterOptionsDefault.wrapWithDirectory
})));
if (!rootEntry || !rootEntry.cid) {
throw new Error('given input could not be parsed correctly');
}
const root = rootEntry.cid;
- const { writer, out } = await car_1.CarWriter.create([root]);
- for await (const block of blockstore.blocks()) {
- writer.put(block);
- }
- writer.close();
- if (!userBlockstore) {
- await blockstore.close();
- }
+ const { writer, out: carOut } = await car_1.CarWriter.create([root]);
+ const carOutIter = carOut[Symbol.asyncIterator]();
+ let writingPromise;
+ const writeAll = async () => {
+ for await (const block of blockstore.blocks()) {
+ // `await` will block until all bytes in `carOut` are consumed by the user
+ // so we have backpressure here
+ await writer.put(block);
+ }
+ await writer.close();
+ if (!userBlockstore) {
+ await blockstore.close();
+ }
+ };
+ const out = {
+ [Symbol.asyncIterator]() {
+ if (writingPromise != null) {
+                throw new Error('Multiple iterators not supported');
+ }
+ // don't start writing until the user starts consuming the iterator
+ writingPromise = writeAll();
+ return {
+ async next() {
+ const result = await carOutIter.next();
+ if (result.done) {
+ await writingPromise; // any errors will propagate from here
+ }
+ return result;
+ }
+ };
+ }
+ };
return { root, out };
}
exports.pack = pack;
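// A hedged sketch of consuming the now-lazy `out` iterator: blocks are only
// written once iteration starts, and `await writer.put(block)` waits for the
// consumer to take the bytes, so the CAR streams with backpressure:
//
//   const { root, out } = await pack({ input: [new Uint8Array([1, 2, 3])] })
//   const parts = []
//   for await (const chunk of out) {
//     parts.push(chunk)
//   }
//   // Buffer.concat(parts) is the complete CAR for `root`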
@@ -7449,7 +8354,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.unpackStream = exports.unpack = void 0;
const browser_readablestream_to_it_1 = __importDefault(__nccwpck_require__(664));
const iterator_1 = __nccwpck_require__(8229);
-const ipfs_unixfs_exporter_1 = __importDefault(__nccwpck_require__(874));
+const ipfs_unixfs_exporter_1 = __nccwpck_require__(2190);
const verifying_get_only_blockstore_1 = __nccwpck_require__(698);
const memory_1 = __nccwpck_require__(7913);
// Export unixfs entries from car file
@@ -7459,7 +8364,7 @@ async function* unpack(carReader, roots) {
roots = await carReader.getRoots();
}
for (const root of roots) {
- yield* ipfs_unixfs_exporter_1.default.recursive(root, verifyingBlockService, { /* options */});
+ yield* (0, ipfs_unixfs_exporter_1.recursive)(root, verifyingBlockService, { /* options */});
}
}
exports.unpack = unpack;
@@ -7474,7 +8379,7 @@ async function* unpackStream(readable, { roots, blockstore: userBlockstore } = {
roots = await carIterator.getRoots();
}
for (const root of roots) {
- yield* ipfs_unixfs_exporter_1.default.recursive(root, verifyingBlockStore);
+ yield* (0, ipfs_unixfs_exporter_1.recursive)(root, verifyingBlockStore);
}
}
exports.unpackStream = unpackStream;
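// A hedged sketch of unpackStream above (the readable source is hypothetical;
// roots default to those listed in the CAR header):
//
//   for await (const entry of unpackStream(fs.createReadStream('my.car'))) {
//     console.log(entry.type, entry.path)
//   }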
@@ -7486,7 +8391,7 @@ exports.unpackStream = unpackStream;
*/
function asAsyncIterable(readable) {
// @ts-ignore how to convince tsc that we are checking the type here?
- return Symbol.asyncIterator in readable ? readable : browser_readablestream_to_it_1.default(readable);
+ return Symbol.asyncIterator in readable ? readable : (0, browser_readablestream_to_it_1.default)(readable);
}
@@ -7499,7 +8404,7 @@ function asAsyncIterable(readable) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.VerifyingGetOnlyBlockStore = void 0;
-const uint8arrays_1 = __nccwpck_require__(5804);
+const equals_1 = __nccwpck_require__(9192);
const sha2_1 = __nccwpck_require__(6987);
const interface_blockstore_1 = __nccwpck_require__(6701);
class VerifyingGetOnlyBlockStore extends interface_blockstore_1.BlockstoreAdapter {
@@ -7533,7 +8438,7 @@ class VerifyingGetOnlyBlockStore extends interface_blockstore_1.BlockstoreAdapte
exports.VerifyingGetOnlyBlockStore = VerifyingGetOnlyBlockStore;
async function isValid({ cid, bytes }) {
const hash = await sha2_1.sha256.digest(bytes);
- return uint8arrays_1.equals(hash.digest, cid.multihash.digest);
+ return (0, equals_1.equals)(hash.digest, cid.multihash.digest);
}
@@ -7546,7 +8451,7 @@ async function isValid({ cid, bytes }) {
const normaliseContent = __nccwpck_require__(1168)
-const normaliseInput = __nccwpck_require__(649)
+const normalise = __nccwpck_require__(649)
/**
* @typedef {import('ipfs-core-types/src/utils').ImportCandidateStream} ImportCandidateStream
@@ -7563,9 +8468,14 @@ const normaliseInput = __nccwpck_require__(649)
* See https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options
*
* @param {ImportCandidateStream} input
- * @returns {AsyncGenerator}
*/
-module.exports = (input) => normaliseInput(input, normaliseContent)
+function normaliseInput (input) {
+ return normalise(input, normaliseContent)
+}
+
+module.exports = {
+ normaliseInput
+}
/***/ }),
@@ -7577,7 +8487,7 @@ module.exports = (input) => normaliseInput(input, normaliseContent)
const errCode = __nccwpck_require__(2997)
-const uint8ArrayFromString = __nccwpck_require__(828)
+const { fromString: uint8ArrayFromString } = __nccwpck_require__(3538)
const browserStreamToIt = __nccwpck_require__(664)
const blobToIt = __nccwpck_require__(7842)
const itPeekable = __nccwpck_require__(2276)
@@ -7694,7 +8604,7 @@ const {
const {
parseMtime,
parseMode
-} = __nccwpck_require__(9811)
+} = __nccwpck_require__(4103)
/**
* @typedef {import('ipfs-core-types/src/utils').ToContent} ToContent
@@ -7709,7 +8619,7 @@ const {
// eslint-disable-next-line complexity
module.exports = async function * normaliseInput (input, normaliseContent) {
if (input === null || input === undefined) {
- return
+ throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT')
}
// String
@@ -7798,6 +8708,7 @@ async function toFileObject (input, normaliseContent) {
}
if (content) {
+ // @ts-ignore TODO vmx 2021-03-30 enable again
file.content = await normaliseContent(content)
} else if (!path) { // Not already a file object with path or content prop
// @ts-ignore - input still can be different ToContent
@@ -7816,8 +8727,6 @@ async function toFileObject (input, normaliseContent) {
"use strict";
-const { Blob } = globalThis
-
/**
* @param {any} obj
* @returns {obj is ArrayBufferView|ArrayBuffer}
@@ -7828,10 +8737,12 @@ function isBytes (obj) {
/**
* @param {any} obj
- * @returns {obj is Blob}
+ * @returns {obj is globalThis.Blob}
*/
function isBlob (obj) {
- return typeof Blob !== 'undefined' && obj instanceof Blob
+ return obj.constructor &&
+ (obj.constructor.name === 'Blob' || obj.constructor.name === 'File') &&
+ typeof obj.stream === 'function'
}
/**
@@ -7861,5349 +8772,2490 @@ module.exports = {
/***/ }),
-/***/ 7381:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+/***/ 2190:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-const {
- Data: PBData
-} = __nccwpck_require__(6189)
-const errcode = __nccwpck_require__(2997)
-
-/**
- * @typedef {import('./types').Mtime} Mtime
- * @typedef {import('./types').MtimeLike} MtimeLike
- */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
-const types = [
- 'raw',
- 'directory',
- 'file',
- 'metadata',
- 'symlink',
- 'hamt-sharded-directory'
-]
+var errCode = __nccwpck_require__(2997);
+var cid = __nccwpck_require__(6447);
+var index = __nccwpck_require__(9933);
+var last = __nccwpck_require__(7123);
-const dirTypes = [
- 'directory',
- 'hamt-sharded-directory'
-]
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-const DEFAULT_FILE_MODE = parseInt('0644', 8)
-const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8)
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+var last__default = /*#__PURE__*/_interopDefaultLegacy(last);
-/**
- * @param {string | number | undefined} [mode]
- */
-function parseMode (mode) {
- if (mode == null) {
- return undefined
+const toPathComponents = (path = '') => {
+ return (path.trim().match(/([^\\^/]|\\\/)+/g) || []).filter(Boolean);
+};
+const cidAndRest = path => {
+ if (path instanceof Uint8Array) {
+ return {
+ cid: cid.CID.decode(path),
+ toResolve: []
+ };
}
-
- if (typeof mode === 'number') {
- return mode & 0xFFF
+ const cid$1 = cid.CID.asCID(path);
+ if (cid$1) {
+ return {
+ cid: cid$1,
+ toResolve: []
+ };
}
-
- mode = mode.toString()
-
- if (mode.substring(0, 1) === '0') {
- // octal string
- return parseInt(mode, 8) & 0xFFF
+ if (typeof path === 'string') {
+ if (path.indexOf('/ipfs/') === 0) {
+ path = path.substring(6);
+ }
+ const output = toPathComponents(path);
+ return {
+ cid: cid.CID.parse(output[0]),
+ toResolve: output.slice(1)
+ };
+ }
+ throw errCode__default['default'](new Error(`Unknown path type ${ path }`), 'ERR_BAD_PATH');
+};
+async function* walkPath(path, blockstore, options = {}) {
+ let {cid, toResolve} = cidAndRest(path);
+ let name = cid.toString();
+ let entryPath = name;
+ const startingDepth = toResolve.length;
+ while (true) {
+ const result = await index(cid, name, entryPath, toResolve, startingDepth, blockstore, options);
+ if (!result.entry && !result.next) {
+ throw errCode__default['default'](new Error(`Could not resolve ${ path }`), 'ERR_NOT_FOUND');
+ }
+ if (result.entry) {
+ yield result.entry;
+ }
+ if (!result.next) {
+ return;
+ }
+ toResolve = result.next.toResolve;
+ cid = result.next.cid;
+ name = result.next.name;
+ entryPath = result.next.path;
}
-
- // decimal string
- return parseInt(mode, 10) & 0xFFF
}
-
-/**
- * @param {any} input
- */
-function parseMtime (input) {
- if (input == null) {
- return undefined
+async function exporter(path, blockstore, options = {}) {
+ const result = await last__default['default'](walkPath(path, blockstore, options));
+ if (!result) {
+ throw errCode__default['default'](new Error(`Could not resolve ${ path }`), 'ERR_NOT_FOUND');
}
-
- /** @type {Mtime | undefined} */
- let mtime
-
- // { secs, nsecs }
- if (input.secs != null) {
- mtime = {
- secs: input.secs,
- nsecs: input.nsecs
- }
+ return result;
+}
+async function* recursive(path, blockstore, options = {}) {
+ const node = await exporter(path, blockstore, options);
+ if (!node) {
+ return;
}
-
- // UnixFS TimeSpec
- if (input.Seconds != null) {
- mtime = {
- secs: input.Seconds,
- nsecs: input.FractionalNanoseconds
+ yield node;
+ if (node.type === 'directory') {
+ for await (const child of recurse(node, options)) {
+ yield child;
}
}
-
- // process.hrtime()
- if (Array.isArray(input)) {
- mtime = {
- secs: input[0],
- nsecs: input[1]
+ async function* recurse(node, options) {
+ for await (const file of node.content(options)) {
+ yield file;
+ if (file instanceof Uint8Array) {
+ continue;
+ }
+ if (file.type === 'directory') {
+ yield* recurse(file, options);
+ }
}
}
+}
- // Javascript Date
- if (input instanceof Date) {
- const ms = input.getTime()
- const secs = Math.floor(ms / 1000)
-
- mtime = {
- secs: secs,
- nsecs: (ms - (secs * 1000)) * 1000
- }
- }
+exports.exporter = exporter;
+exports.recursive = recursive;
+exports.walkPath = walkPath;
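// A hedged sketch of the path semantics implemented above (the CID is
// hypothetical and the blockstore must already hold the referenced blocks):
//
//   for await (const entry of walkPath('/ipfs/bafy.../dir/file.txt', blockstore)) {
//     // yields one entry per resolved segment: the root, 'dir', then 'file.txt'
//   }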
- /*
- TODO: https://github.com/ipfs/aegir/issues/487
- // process.hrtime.bigint()
- if (input instanceof BigInt) {
- const secs = input / BigInt(1e9)
- const nsecs = input - (secs * BigInt(1e9))
+/***/ }),
- mtime = {
- secs: parseInt(secs.toString()),
- nsecs: parseInt(nsecs.toString())
- }
- }
- */
+/***/ 8764:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (!Object.prototype.hasOwnProperty.call(mtime, 'secs')) {
- return undefined
- }
+"use strict";
- if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) {
- throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS')
- }
- return mtime
-}
+var cid = __nccwpck_require__(6447);
+var errCode = __nccwpck_require__(2997);
+var dagCbor = __nccwpck_require__(6477);
-class Data {
- /**
- * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md
- *
- * @param {Uint8Array} marshaled
- */
- static unmarshal (marshaled) {
- const message = PBData.decode(marshaled)
- const decoded = PBData.toObject(message, {
- defaults: false,
- arrays: true,
- longs: Number,
- objects: false
- })
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- const data = new Data({
- type: types[decoded.Type],
- data: decoded.Data,
- blockSizes: decoded.blocksizes,
- mode: decoded.mode,
- mtime: decoded.mtime
- ? {
- secs: decoded.mtime.Seconds,
- nsecs: decoded.mtime.FractionalNanoseconds
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () {
+ return e[k];
}
- : undefined
- })
-
- // make sure we honour the original mode
- data._originalMode = decoded.mode || 0
-
- return data
+ });
+ }
+ });
}
+ n['default'] = e;
+ return Object.freeze(n);
+}
- /**
- * @param {object} [options]
- * @param {string} [options.type='file']
- * @param {Uint8Array} [options.data]
- * @param {number[]} [options.blockSizes]
- * @param {number} [options.hashType]
- * @param {number} [options.fanout]
- * @param {MtimeLike | null} [options.mtime]
- * @param {number | string} [options.mode]
- */
- constructor (options = {
- type: 'file'
- }) {
- const {
- type,
- data,
- blockSizes,
- hashType,
- fanout,
- mtime,
- mode
- } = options
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+var dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);
- if (type && !types.includes(type)) {
- throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')
+const resolve = async (cid$1, name, path, toResolve, resolve, depth, blockstore, options) => {
+ const block = await blockstore.get(cid$1);
+ const object = dagCbor__namespace.decode(block);
+ let subObject = object;
+ let subPath = path;
+ while (toResolve.length) {
+ const prop = toResolve[0];
+ if (prop in subObject) {
+ toResolve.shift();
+ subPath = `${ subPath }/${ prop }`;
+ const subObjectCid = cid.CID.asCID(subObject[prop]);
+ if (subObjectCid) {
+ return {
+ entry: {
+ type: 'object',
+ name,
+ path,
+ cid: cid$1,
+ node: block,
+ depth,
+ size: block.length,
+ content: async function* () {
+ yield object;
+ }
+ },
+ next: {
+ cid: subObjectCid,
+ name: prop,
+ path: subPath,
+ toResolve
+ }
+ };
+ }
+ subObject = subObject[prop];
+ } else {
+ throw errCode__default['default'](new Error(`No property named ${ prop } found in cbor node ${ cid$1 }`), 'ERR_NO_PROP');
+ }
+ }
+ return {
+ entry: {
+ type: 'object',
+ name,
+ path,
+ cid: cid$1,
+ node: block,
+ depth,
+ size: block.length,
+ content: async function* () {
+ yield object;
+ }
}
+ };
+};
- this.type = type || 'file'
- this.data = data
- this.hashType = hashType
- this.fanout = fanout
+module.exports = resolve;
- /** @type {number[]} */
- this.blockSizes = blockSizes || []
- this._originalMode = 0
- this.mode = parseMode(mode)
- if (mtime) {
- this.mtime = parseMtime(mtime)
+/***/ }),
- if (this.mtime && !this.mtime.nsecs) {
- this.mtime.nsecs = 0
- }
- }
- }
+/***/ 8155:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- /**
- * @param {number | undefined} mode
- */
- set mode (mode) {
- this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE
+"use strict";
- const parsedMode = parseMode(mode)
- if (parsedMode !== undefined) {
- this._mode = parsedMode
- }
- }
+var errCode = __nccwpck_require__(2997);
+var extractDataFromBlock = __nccwpck_require__(4840);
+var validateOffsetAndLength = __nccwpck_require__(4287);
+var mh = __nccwpck_require__(76);
- /**
- * @returns {number | undefined}
- */
- get mode () {
- return this._mode
- }
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- isDirectory () {
- return Boolean(this.type && dirTypes.includes(this.type))
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () {
+ return e[k];
+ }
+ });
+ }
+ });
}
+ n['default'] = e;
+ return Object.freeze(n);
+}
- /**
- * @param {number} size
- */
- addBlockSize (size) {
- this.blockSizes.push(size)
- }
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+var mh__namespace = /*#__PURE__*/_interopNamespace(mh);
- /**
- * @param {number} index
- */
- removeBlockSize (index) {
- this.blockSizes.splice(index, 1)
+const rawContent = node => {
+ async function* contentGenerator(options = {}) {
+ const {offset, length} = validateOffsetAndLength(node.length, options.offset, options.length);
+ yield extractDataFromBlock(node, 0, offset, offset + length);
}
-
- /**
- * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else
- */
- fileSize () {
- if (this.isDirectory()) {
- // dirs don't have file size
- return 0
- }
-
- let sum = 0
- this.blockSizes.forEach((size) => {
- sum += size
- })
-
- if (this.data) {
- sum += this.data.length
- }
-
- return sum
+ return contentGenerator;
+};
+const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
+ if (toResolve.length) {
+ throw errCode__default['default'](new Error(`No link named ${ path } found in raw node ${ cid }`), 'ERR_NOT_FOUND');
}
-
- /**
- * encode to protobuf Uint8Array
- */
- marshal () {
- let type
-
- switch (this.type) {
- case 'raw': type = PBData.DataType.Raw; break
- case 'directory': type = PBData.DataType.Directory; break
- case 'file': type = PBData.DataType.File; break
- case 'metadata': type = PBData.DataType.Metadata; break
- case 'symlink': type = PBData.DataType.Symlink; break
- case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break
- default:
-        throw errcode(new Error('Type: ' + this.type + ' is not valid'), 'ERR_INVALID_TYPE')
+ const buf = await mh__namespace.decode(cid.multihash.bytes);
+ return {
+ entry: {
+ type: 'identity',
+ name,
+ path,
+ cid,
+ content: rawContent(buf.digest),
+ depth,
+ size: buf.digest.length,
+ node: buf.digest
}
+ };
+};
- let data = this.data
-
- if (!this.data || !this.data.length) {
- data = undefined
- }
+module.exports = resolve;
- let mode
- if (this.mode != null) {
- mode = (this._originalMode & 0xFFFFF000) | (parseMode(this.mode) || 0)
+/***/ }),
- if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {
- mode = undefined
- }
+/***/ 9933:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {
- mode = undefined
- }
- }
+"use strict";
- let mtime
- if (this.mtime != null) {
- const parsed = parseMtime(this.mtime)
+var errCode = __nccwpck_require__(2997);
+var dagPb = __nccwpck_require__(8012);
+var dagCbor = __nccwpck_require__(6477);
+var raw = __nccwpck_require__(2048);
+var identity = __nccwpck_require__(2379);
+var index = __nccwpck_require__(9662);
+var raw$1 = __nccwpck_require__(6906);
+var dagCbor$1 = __nccwpck_require__(8764);
+var identity$1 = __nccwpck_require__(8155);
- if (parsed) {
- mtime = {
- Seconds: parsed.secs,
- FractionalNanoseconds: parsed.nsecs
- }
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- if (mtime.FractionalNanoseconds === 0) {
- delete mtime.FractionalNanoseconds
- }
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () {
+ return e[k];
+ }
+ });
}
- }
+ });
+ }
+ n['default'] = e;
+ return Object.freeze(n);
+}
- const pbData = {
- Type: type,
- Data: data,
- filesize: this.isDirectory() ? undefined : this.fileSize(),
- blocksizes: this.blockSizes,
- hashType: this.hashType,
- fanout: this.fanout,
- mode,
- mtime
- }
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+var dagPb__namespace = /*#__PURE__*/_interopNamespace(dagPb);
+var dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);
+var raw__namespace = /*#__PURE__*/_interopNamespace(raw);
- return PBData.encode(pbData).finish()
+const resolvers = {
+ [dagPb__namespace.code]: index,
+ [raw__namespace.code]: raw$1,
+ [dagCbor__namespace.code]: dagCbor$1,
+ [identity.identity.code]: identity$1
+};
+function resolve(cid, name, path, toResolve, depth, blockstore, options) {
+ const resolver = resolvers[cid.code];
+ if (!resolver) {
+ throw errCode__default['default'](new Error(`No resolver for code ${ cid.code }`), 'ERR_NO_RESOLVER');
}
+ return resolver(cid, name, path, toResolve, resolve, depth, blockstore, options);
}
-module.exports = {
- UnixFS: Data,
- parseMode,
- parseMtime
-}
+module.exports = resolve;
/***/ }),
-/***/ 6189:
+/***/ 6906:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-/*eslint-disable*/
-
-
-var $protobuf = __nccwpck_require__(6916);
-// Common aliases
-var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
-// Exported root namespace
-var $root = $protobuf.roots["ipfs-unixfs"] || ($protobuf.roots["ipfs-unixfs"] = {});
+var errCode = __nccwpck_require__(2997);
+var extractDataFromBlock = __nccwpck_require__(4840);
+var validateOffsetAndLength = __nccwpck_require__(4287);
-$root.Data = (function() {
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- /**
- * Properties of a Data.
- * @exports IData
- * @interface IData
- * @property {Data.DataType} Type Data Type
- * @property {Uint8Array|null} [Data] Data Data
- * @property {number|null} [filesize] Data filesize
- * @property {Array.<number>|null} [blocksizes] Data blocksizes
- * @property {number|null} [hashType] Data hashType
- * @property {number|null} [fanout] Data fanout
- * @property {number|null} [mode] Data mode
- * @property {IUnixTime|null} [mtime] Data mtime
- */
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
- /**
- * Constructs a new Data.
- * @exports Data
- * @classdesc Represents a Data.
- * @implements IData
- * @constructor
- * @param {IData=} [p] Properties to set
- */
- function Data(p) {
- this.blocksizes = [];
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
+const rawContent = node => {
+ async function* contentGenerator(options = {}) {
+ const {offset, length} = validateOffsetAndLength(node.length, options.offset, options.length);
+ yield extractDataFromBlock(node, 0, offset, offset + length);
+ }
+ return contentGenerator;
+};
+const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
+ if (toResolve.length) {
+ throw errCode__default['default'](new Error(`No link named ${ path } found in raw node ${ cid }`), 'ERR_NOT_FOUND');
+ }
+ const block = await blockstore.get(cid, options);
+ return {
+ entry: {
+ type: 'raw',
+ name,
+ path,
+ cid,
+ content: rawContent(block),
+ depth,
+ size: block.length,
+ node: block
}
+ };
+};
- /**
- * Data Type.
- * @member {Data.DataType} Type
- * @memberof Data
- * @instance
- */
- Data.prototype.Type = 0;
-
- /**
- * Data Data.
- * @member {Uint8Array} Data
- * @memberof Data
- * @instance
- */
- Data.prototype.Data = $util.newBuffer([]);
-
- /**
- * Data filesize.
- * @member {number} filesize
- * @memberof Data
- * @instance
- */
- Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
+module.exports = resolve;
- /**
- * Data blocksizes.
- * @member {Array.<number>} blocksizes
- * @memberof Data
- * @instance
- */
- Data.prototype.blocksizes = $util.emptyArray;
- /**
- * Data hashType.
- * @member {number} hashType
- * @memberof Data
- * @instance
- */
- Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
+/***/ }),
- /**
- * Data fanout.
- * @member {number} fanout
- * @memberof Data
- * @instance
- */
- Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
+/***/ 7659:
+/***/ ((module) => {
- /**
- * Data mode.
- * @member {number} mode
- * @memberof Data
- * @instance
- */
- Data.prototype.mode = 0;
+"use strict";
- /**
- * Data mtime.
- * @member {IUnixTime|null|undefined} mtime
- * @memberof Data
- * @instance
- */
- Data.prototype.mtime = null;
- /**
- * Encodes the specified Data message. Does not implicitly {@link Data.verify|verify} messages.
- * @function encode
- * @memberof Data
- * @static
- * @param {IData} m Data message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- Data.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- w.uint32(8).int32(m.Type);
- if (m.Data != null && Object.hasOwnProperty.call(m, "Data"))
- w.uint32(18).bytes(m.Data);
- if (m.filesize != null && Object.hasOwnProperty.call(m, "filesize"))
- w.uint32(24).uint64(m.filesize);
- if (m.blocksizes != null && m.blocksizes.length) {
- for (var i = 0; i < m.blocksizes.length; ++i)
- w.uint32(32).uint64(m.blocksizes[i]);
- }
- if (m.hashType != null && Object.hasOwnProperty.call(m, "hashType"))
- w.uint32(40).uint64(m.hashType);
- if (m.fanout != null && Object.hasOwnProperty.call(m, "fanout"))
- w.uint32(48).uint64(m.fanout);
- if (m.mode != null && Object.hasOwnProperty.call(m, "mode"))
- w.uint32(56).uint32(m.mode);
- if (m.mtime != null && Object.hasOwnProperty.call(m, "mtime"))
- $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim();
- return w;
- };
+const directoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {
+ async function* yieldDirectoryContent(options = {}) {
+ const offset = options.offset || 0;
+ const length = options.length || node.Links.length;
+ const links = node.Links.slice(offset, length);
+ for (const link of links) {
+ const result = await resolve(link.Hash, link.Name || '', `${ path }/${ link.Name || '' }`, [], depth + 1, blockstore, options);
+ if (result.entry) {
+ yield result.entry;
+ }
+ }
+ }
+ return yieldDirectoryContent;
+};
- /**
- * Decodes a Data message from the specified reader or buffer.
- * @function decode
- * @memberof Data
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {Data} Data
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- Data.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.Data();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.Type = r.int32();
- break;
- case 2:
- m.Data = r.bytes();
- break;
- case 3:
- m.filesize = r.uint64();
- break;
- case 4:
- if (!(m.blocksizes && m.blocksizes.length))
- m.blocksizes = [];
- if ((t & 7) === 2) {
- var c2 = r.uint32() + r.pos;
- while (r.pos < c2)
- m.blocksizes.push(r.uint64());
- } else
- m.blocksizes.push(r.uint64());
- break;
- case 5:
- m.hashType = r.uint64();
- break;
- case 6:
- m.fanout = r.uint64();
- break;
- case 7:
- m.mode = r.uint32();
- break;
- case 8:
- m.mtime = $root.UnixTime.decode(r, r.uint32());
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- if (!m.hasOwnProperty("Type"))
- throw $util.ProtocolError("missing required 'Type'", { instance: m });
- return m;
- };
+module.exports = directoryContent;
- /**
- * Creates a Data message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof Data
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {Data} Data
- */
- Data.fromObject = function fromObject(d) {
- if (d instanceof $root.Data)
- return d;
- var m = new $root.Data();
- switch (d.Type) {
- case "Raw":
- case 0:
- m.Type = 0;
- break;
- case "Directory":
- case 1:
- m.Type = 1;
- break;
- case "File":
- case 2:
- m.Type = 2;
- break;
- case "Metadata":
- case 3:
- m.Type = 3;
- break;
- case "Symlink":
- case 4:
- m.Type = 4;
- break;
- case "HAMTShard":
- case 5:
- m.Type = 5;
- break;
- }
- if (d.Data != null) {
- if (typeof d.Data === "string")
- $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0);
- else if (d.Data.length)
- m.Data = d.Data;
- }
- if (d.filesize != null) {
- if ($util.Long)
- (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true;
- else if (typeof d.filesize === "string")
- m.filesize = parseInt(d.filesize, 10);
- else if (typeof d.filesize === "number")
- m.filesize = d.filesize;
- else if (typeof d.filesize === "object")
- m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true);
- }
- if (d.blocksizes) {
- if (!Array.isArray(d.blocksizes))
- throw TypeError(".Data.blocksizes: array expected");
- m.blocksizes = [];
- for (var i = 0; i < d.blocksizes.length; ++i) {
- if ($util.Long)
- (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true;
- else if (typeof d.blocksizes[i] === "string")
- m.blocksizes[i] = parseInt(d.blocksizes[i], 10);
- else if (typeof d.blocksizes[i] === "number")
- m.blocksizes[i] = d.blocksizes[i];
- else if (typeof d.blocksizes[i] === "object")
- m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true);
- }
- }
- if (d.hashType != null) {
- if ($util.Long)
- (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true;
- else if (typeof d.hashType === "string")
- m.hashType = parseInt(d.hashType, 10);
- else if (typeof d.hashType === "number")
- m.hashType = d.hashType;
- else if (typeof d.hashType === "object")
- m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true);
- }
- if (d.fanout != null) {
- if ($util.Long)
- (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true;
- else if (typeof d.fanout === "string")
- m.fanout = parseInt(d.fanout, 10);
- else if (typeof d.fanout === "number")
- m.fanout = d.fanout;
- else if (typeof d.fanout === "object")
- m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true);
- }
- if (d.mode != null) {
- m.mode = d.mode >>> 0;
- }
- if (d.mtime != null) {
- if (typeof d.mtime !== "object")
- throw TypeError(".Data.mtime: object expected");
- m.mtime = $root.UnixTime.fromObject(d.mtime);
- }
- return m;
- };
- /**
- * Creates a plain object from a Data message. Also converts values to other types if specified.
- * @function toObject
- * @memberof Data
- * @static
- * @param {Data} m Data
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- Data.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.arrays || o.defaults) {
- d.blocksizes = [];
- }
- if (o.defaults) {
- d.Type = o.enums === String ? "Raw" : 0;
- if (o.bytes === String)
- d.Data = "";
- else {
- d.Data = [];
- if (o.bytes !== Array)
- d.Data = $util.newBuffer(d.Data);
- }
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.filesize = o.longs === String ? "0" : 0;
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.hashType = o.longs === String ? "0" : 0;
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.fanout = o.longs === String ? "0" : 0;
- d.mode = 0;
- d.mtime = null;
- }
- if (m.Type != null && m.hasOwnProperty("Type")) {
- d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type;
- }
- if (m.Data != null && m.hasOwnProperty("Data")) {
- d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data;
- }
- if (m.filesize != null && m.hasOwnProperty("filesize")) {
- if (typeof m.filesize === "number")
- d.filesize = o.longs === String ? String(m.filesize) : m.filesize;
- else
- d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize;
- }
- if (m.blocksizes && m.blocksizes.length) {
- d.blocksizes = [];
- for (var j = 0; j < m.blocksizes.length; ++j) {
- if (typeof m.blocksizes[j] === "number")
- d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j];
- else
- d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j];
- }
- }
- if (m.hashType != null && m.hasOwnProperty("hashType")) {
- if (typeof m.hashType === "number")
- d.hashType = o.longs === String ? String(m.hashType) : m.hashType;
- else
- d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType;
- }
- if (m.fanout != null && m.hasOwnProperty("fanout")) {
- if (typeof m.fanout === "number")
- d.fanout = o.longs === String ? String(m.fanout) : m.fanout;
- else
- d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout;
- }
- if (m.mode != null && m.hasOwnProperty("mode")) {
- d.mode = m.mode;
- }
- if (m.mtime != null && m.hasOwnProperty("mtime")) {
- d.mtime = $root.UnixTime.toObject(m.mtime, o);
- }
- return d;
- };
+/***/ }),
- /**
- * Converts this Data to JSON.
- * @function toJSON
- * @memberof Data
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- Data.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
+/***/ 5704:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- /**
- * DataType enum.
- * @name Data.DataType
- * @enum {number}
- * @property {number} Raw=0 Raw value
- * @property {number} Directory=1 Directory value
- * @property {number} File=2 File value
- * @property {number} Metadata=3 Metadata value
- * @property {number} Symlink=4 Symlink value
- * @property {number} HAMTShard=5 HAMTShard value
- */
- Data.DataType = (function() {
- var valuesById = {}, values = Object.create(valuesById);
- values[valuesById[0] = "Raw"] = 0;
- values[valuesById[1] = "Directory"] = 1;
- values[valuesById[2] = "File"] = 2;
- values[valuesById[3] = "Metadata"] = 3;
- values[valuesById[4] = "Symlink"] = 4;
- values[valuesById[5] = "HAMTShard"] = 5;
- return values;
- })();
+"use strict";
- return Data;
-})();
-$root.UnixTime = (function() {
+var extractDataFromBlock = __nccwpck_require__(4840);
+var validateOffsetAndLength = __nccwpck_require__(4287);
+var ipfsUnixfs = __nccwpck_require__(4103);
+var errCode = __nccwpck_require__(2997);
+var dagPb = __nccwpck_require__(8012);
+var dagCbor = __nccwpck_require__(6477);
+var raw = __nccwpck_require__(2048);
- /**
- * Properties of a UnixTime.
- * @exports IUnixTime
- * @interface IUnixTime
- * @property {number} Seconds UnixTime Seconds
- * @property {number|null} [FractionalNanoseconds] UnixTime FractionalNanoseconds
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- /**
- * Constructs a new UnixTime.
- * @exports UnixTime
- * @classdesc Represents a UnixTime.
- * @implements IUnixTime
- * @constructor
- * @param {IUnixTime=} [p] Properties to set
- */
- function UnixTime(p) {
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
- }
-
- /**
- * UnixTime Seconds.
- * @member {number} Seconds
- * @memberof UnixTime
- * @instance
- */
- UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0;
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () {
+ return e[k];
+ }
+ });
+ }
+ });
+ }
+ n['default'] = e;
+ return Object.freeze(n);
+}
- /**
- * UnixTime FractionalNanoseconds.
- * @member {number} FractionalNanoseconds
- * @memberof UnixTime
- * @instance
- */
- UnixTime.prototype.FractionalNanoseconds = 0;
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+var dagPb__namespace = /*#__PURE__*/_interopNamespace(dagPb);
+var dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);
+var raw__namespace = /*#__PURE__*/_interopNamespace(raw);
- /**
- * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages.
- * @function encode
- * @memberof UnixTime
- * @static
- * @param {IUnixTime} m UnixTime message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- UnixTime.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- w.uint32(8).int64(m.Seconds);
- if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, "FractionalNanoseconds"))
- w.uint32(21).fixed32(m.FractionalNanoseconds);
- return w;
- };
+async function* emitBytes(blockstore, node, start, end, streamPosition = 0, options) {
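+  // a `raw` node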
+ if (node instanceof Uint8Array) {
+ const buf = extractDataFromBlock(node, streamPosition, start, end);
+ if (buf.length) {
+ yield buf;
+ }
+ streamPosition += buf.length;
+ return streamPosition;
+ }
+ if (node.Data == null) {
+ throw errCode__default['default'](new Error('no data in PBNode'), 'ERR_NOT_UNIXFS');
+ }
+ let file;
+ try {
+ file = ipfsUnixfs.UnixFS.unmarshal(node.Data);
+ } catch (err) {
+ throw errCode__default['default'](err, 'ERR_NOT_UNIXFS');
+ }
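+  // might be a unixfs `raw` node or have data on intermediate nodes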
+ if (file.data && file.data.length) {
+ const buf = extractDataFromBlock(file.data, streamPosition, start, end);
+ if (buf.length) {
+ yield buf;
+ }
+ streamPosition += file.data.length;
+ }
+ let childStart = streamPosition;
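+  // work out which child nodes contain the requested data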
+ for (let i = 0; i < node.Links.length; i++) {
+ const childLink = node.Links[i];
+ const childEnd = streamPosition + file.blockSizes[i];
+ if (start >= childStart && start < childEnd || end > childStart && end <= childEnd || start < childStart && end > childEnd) {
+ const block = await blockstore.get(childLink.Hash, { signal: options.signal });
+ let child;
+ switch (childLink.Hash.code) {
+ case dagPb__namespace.code:
+ child = await dagPb__namespace.decode(block);
+ break;
+ case raw__namespace.code:
+ child = block;
+ break;
+ case dagCbor__namespace.code:
+ child = await dagCbor__namespace.decode(block);
+ break;
+ default:
+ throw Error(`Unsupported codec: ${ childLink.Hash.code }`);
+ }
+ for await (const buf of emitBytes(blockstore, child, start, end, streamPosition, options)) {
+ streamPosition += buf.length;
+ yield buf;
+ }
+ }
+ streamPosition = childEnd;
+ childStart = childEnd + 1;
+ }
+}
+const fileContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {
+ function yieldFileContent(options = {}) {
+ const fileSize = unixfs.fileSize();
+ if (fileSize === undefined) {
+ throw new Error('File was a directory');
+ }
+ const {offset, length} = validateOffsetAndLength(fileSize, options.offset, options.length);
+ const start = offset;
+ const end = offset + length;
+ return emitBytes(blockstore, node, start, end, 0, options);
+ }
+ return yieldFileContent;
+};
- /**
- * Decodes a UnixTime message from the specified reader or buffer.
- * @function decode
- * @memberof UnixTime
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {UnixTime} UnixTime
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- UnixTime.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.UnixTime();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.Seconds = r.int64();
- break;
- case 2:
- m.FractionalNanoseconds = r.fixed32();
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- if (!m.hasOwnProperty("Seconds"))
- throw $util.ProtocolError("missing required 'Seconds'", { instance: m });
- return m;
- };
+module.exports = fileContent;
- /**
- * Creates a UnixTime message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof UnixTime
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {UnixTime} UnixTime
- */
- UnixTime.fromObject = function fromObject(d) {
- if (d instanceof $root.UnixTime)
- return d;
- var m = new $root.UnixTime();
- if (d.Seconds != null) {
- if ($util.Long)
- (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false;
- else if (typeof d.Seconds === "string")
- m.Seconds = parseInt(d.Seconds, 10);
- else if (typeof d.Seconds === "number")
- m.Seconds = d.Seconds;
- else if (typeof d.Seconds === "object")
- m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber();
- }
- if (d.FractionalNanoseconds != null) {
- m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0;
- }
- return m;
- };
- /**
- * Creates a plain object from a UnixTime message. Also converts values to other types if specified.
- * @function toObject
- * @memberof UnixTime
- * @static
- * @param {UnixTime} m UnixTime
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- UnixTime.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.defaults) {
- if ($util.Long) {
- var n = new $util.Long(0, 0, false);
- d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.Seconds = o.longs === String ? "0" : 0;
- d.FractionalNanoseconds = 0;
- }
- if (m.Seconds != null && m.hasOwnProperty("Seconds")) {
- if (typeof m.Seconds === "number")
- d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds;
- else
- d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds;
- }
- if (m.FractionalNanoseconds != null && m.hasOwnProperty("FractionalNanoseconds")) {
- d.FractionalNanoseconds = m.FractionalNanoseconds;
- }
- return d;
- };
+/***/ }),
- /**
- * Converts this UnixTime to JSON.
- * @function toJSON
- * @memberof UnixTime
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- UnixTime.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
+/***/ 9226:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- return UnixTime;
-})();
+"use strict";
-$root.Metadata = (function() {
- /**
- * Properties of a Metadata.
- * @exports IMetadata
- * @interface IMetadata
- * @property {string|null} [MimeType] Metadata MimeType
- */
+var dagPb = __nccwpck_require__(8012);
- /**
- * Constructs a new Metadata.
- * @exports Metadata
- * @classdesc Represents a Metadata.
- * @implements IMetadata
- * @constructor
- * @param {IMetadata=} [p] Properties to set
- */
- function Metadata(p) {
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
+const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {
+ function yieldHamtDirectoryContent(options = {}) {
+ return listDirectory(node, path, resolve, depth, blockstore, options);
+ }
+ return yieldHamtDirectoryContent;
+};
+async function* listDirectory(node, path, resolve, depth, blockstore, options) {
+ const links = node.Links;
+ for (const link of links) {
+ const name = link.Name != null ? link.Name.substring(2) : null;
+ if (name) {
+ const result = await resolve(link.Hash, name, `${ path }/${ name }`, [], depth + 1, blockstore, options);
+ yield result.entry;
+ } else {
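+      // descend into subshard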
+ const block = await blockstore.get(link.Hash);
+ node = dagPb.decode(block);
+ for await (const file of listDirectory(node, path, resolve, depth, blockstore, options)) {
+ yield file;
+ }
}
+ }
+}
- /**
- * Metadata MimeType.
- * @member {string} MimeType
- * @memberof Metadata
- * @instance
- */
- Metadata.prototype.MimeType = "";
-
- /**
- * Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages.
- * @function encode
- * @memberof Metadata
- * @static
- * @param {IMetadata} m Metadata message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- Metadata.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- if (m.MimeType != null && Object.hasOwnProperty.call(m, "MimeType"))
- w.uint32(10).string(m.MimeType);
- return w;
- };
-
- /**
- * Decodes a Metadata message from the specified reader or buffer.
- * @function decode
- * @memberof Metadata
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {Metadata} Metadata
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- Metadata.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.MimeType = r.string();
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- return m;
- };
-
- /**
- * Creates a Metadata message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof Metadata
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {Metadata} Metadata
- */
- Metadata.fromObject = function fromObject(d) {
- if (d instanceof $root.Metadata)
- return d;
- var m = new $root.Metadata();
- if (d.MimeType != null) {
- m.MimeType = String(d.MimeType);
- }
- return m;
- };
-
- /**
- * Creates a plain object from a Metadata message. Also converts values to other types if specified.
- * @function toObject
- * @memberof Metadata
- * @static
- * @param {Metadata} m Metadata
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- Metadata.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.defaults) {
- d.MimeType = "";
- }
- if (m.MimeType != null && m.hasOwnProperty("MimeType")) {
- d.MimeType = m.MimeType;
- }
- return d;
- };
-
- /**
- * Converts this Metadata to JSON.
- * @function toJSON
- * @memberof Metadata
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- Metadata.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
-
- return Metadata;
-})();
-
-module.exports = $root;
+module.exports = hamtShardedDirectoryContent;
/***/ }),
-/***/ 874:
+/***/ 9662:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const errCode = __nccwpck_require__(2997)
-const { CID } = __nccwpck_require__(6447)
-const resolve = __nccwpck_require__(1120)
-const last = __nccwpck_require__(7123)
+var errCode = __nccwpck_require__(2997);
+var ipfsUnixfs = __nccwpck_require__(4103);
+var findCidInShard = __nccwpck_require__(3109);
+var dagPb = __nccwpck_require__(8012);
+var file = __nccwpck_require__(5704);
+var directory = __nccwpck_require__(7659);
+var hamtShardedDirectory = __nccwpck_require__(9226);
-/**
- * @typedef {import('ipfs-unixfs').UnixFS} UnixFS
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('./types').ExporterOptions} ExporterOptions
- * @typedef {import('./types').UnixFSFile} UnixFSFile
- * @typedef {import('./types').UnixFSDirectory} UnixFSDirectory
- * @typedef {import('./types').ObjectNode} ObjectNode
- * @typedef {import('./types').RawNode} RawNode
- * @typedef {import('./types').IdentityNode} IdentityNode
- * @typedef {import('./types').UnixFSEntry} UnixFSEntry
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-const toPathComponents = (path = '') => {
- // split on / unless escaped with \
- return (path
- .trim()
- .match(/([^\\^/]|\\\/)+/g) || [])
- .filter(Boolean)
-}
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
-/**
- * @param {string|Uint8Array|CID} path
- */
-const cidAndRest = (path) => {
- if (path instanceof Uint8Array) {
- return {
- cid: CID.decode(path),
- toResolve: []
- }
+const findLinkCid = (node, name) => {
+ const link = node.Links.find(link => link.Name === name);
+ return link && link.Hash;
+};
+const contentExporters = {
+ raw: file,
+ file: file,
+ directory: directory,
+ 'hamt-sharded-directory': hamtShardedDirectory,
+ metadata: (cid, node, unixfs, path, resolve, depth, blockstore) => {
+ return () => [];
+ },
+ symlink: (cid, node, unixfs, path, resolve, depth, blockstore) => {
+ return () => [];
}
-
- const cid = CID.asCID(path)
- if (cid) {
- return {
- cid,
- toResolve: []
- }
+};
+const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
+ const block = await blockstore.get(cid, options);
+ const node = dagPb.decode(block);
+ let unixfs;
+ let next;
+ if (!name) {
+ name = cid.toString();
}
-
- if (typeof path === 'string') {
- if (path.indexOf('/ipfs/') === 0) {
- path = path.substring(6)
- }
-
- const output = toPathComponents(path)
-
- return {
- cid: CID.parse(output[0]),
- toResolve: output.slice(1)
- }
+ if (node.Data == null) {
+ throw errCode__default['default'](new Error('no data in PBNode'), 'ERR_NOT_UNIXFS');
}
-
- throw errCode(new Error(`Unknown path type ${path}`), 'ERR_BAD_PATH')
-}
-
-/**
- * @param {string | CID} path
- * @param {Blockstore} blockstore
- * @param {ExporterOptions} [options]
- */
-async function * walkPath (path, blockstore, options = {}) {
- let {
- cid,
- toResolve
- } = cidAndRest(path)
- let name = cid.toString()
- let entryPath = name
- const startingDepth = toResolve.length
-
- while (true) {
- const result = await resolve(cid, name, entryPath, toResolve, startingDepth, blockstore, options)
-
- if (!result.entry && !result.next) {
- throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND')
- }
-
- if (result.entry) {
- yield result.entry
+ try {
+ unixfs = ipfsUnixfs.UnixFS.unmarshal(node.Data);
+ } catch (err) {
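+    // non-UnixFS dag-pb node? It could happen.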
+ throw errCode__default['default'](err, 'ERR_NOT_UNIXFS');
+ }
+ if (!path) {
+ path = name;
+ }
+ if (toResolve.length) {
+ let linkCid;
+ if (unixfs && unixfs.type === 'hamt-sharded-directory') {
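+      // special case - unixfs v1 hamt shards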
+ linkCid = await findCidInShard(node, toResolve[0], blockstore);
+ } else {
+ linkCid = findLinkCid(node, toResolve[0]);
}
-
- if (!result.next) {
- return
+ if (!linkCid) {
+ throw errCode__default['default'](new Error('file does not exist'), 'ERR_NOT_FOUND');
}
-
- // resolve further parts
- toResolve = result.next.toResolve
- cid = result.next.cid
- name = result.next.name
- entryPath = result.next.path
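+    // remove the path component we have resolved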
+ const nextName = toResolve.shift();
+ const nextPath = `${ path }/${ nextName }`;
+ next = {
+ cid: linkCid,
+ toResolve,
+ name: nextName || '',
+ path: nextPath
+ };
}
-}
+ return {
+ entry: {
+ type: unixfs.isDirectory() ? 'directory' : 'file',
+ name,
+ path,
+ cid,
+ content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockstore),
+ unixfs,
+ depth,
+ node,
+ size: unixfs.fileSize()
+ },
+ next
+ };
+};
-/**
- * @param {string | CID} path
- * @param {Blockstore} blockstore
- * @param {ExporterOptions} [options]
- */
-async function exporter (path, blockstore, options = {}) {
- const result = await last(walkPath(path, blockstore, options))
+module.exports = unixFsResolver;
- if (!result) {
- throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND')
- }
- return result
-}
+/***/ }),
-/**
- * @param {string | CID} path
- * @param {Blockstore} blockstore
- * @param {ExporterOptions} [options]
- */
-async function * recursive (path, blockstore, options = {}) {
- const node = await exporter(path, blockstore, options)
+/***/ 4840:
+/***/ ((module) => {
- if (!node) {
- return
- }
+"use strict";
- yield node
- if (node.type === 'directory') {
- for await (const child of recurse(node, options)) {
- yield child
- }
+function extractDataFromBlock(block, blockStart, requestedStart, requestedEnd) {
+ const blockLength = block.length;
+ const blockEnd = blockStart + blockLength;
+ if (requestedStart >= blockEnd || requestedEnd < blockStart) {
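+    // If we are looking for a byte range that starts after the end of the block
+    // or ends before its start, return an empty block. This can happen when internal nodes contain data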
+ return new Uint8Array(0);
}
-
- /**
- * @param {UnixFSDirectory} node
- * @param {ExporterOptions} options
- * @returns {AsyncGenerator<UnixFSEntry, void, any>}
- */
- async function * recurse (node, options) {
- for await (const file of node.content(options)) {
- yield file
-
- if (file instanceof Uint8Array) {
- continue
- }
-
- if (file.type === 'directory') {
- yield * recurse(file, options)
- }
- }
+ if (requestedEnd >= blockStart && requestedEnd < blockEnd) {
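+    // If the end byte is in the current block, truncate the block to the end byte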
+ block = block.slice(0, requestedEnd - blockStart);
+ }
+ if (requestedStart >= blockStart && requestedStart < blockEnd) {
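+    // If the start byte is in the current block, skip to the start byte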
+ block = block.slice(requestedStart - blockStart);
}
+ return block;
}
-module.exports = {
- exporter,
- walkPath,
- recursive
-}
+module.exports = extractDataFromBlock;
/***/ }),
-/***/ 7049:
+/***/ 3109:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const { CID } = __nccwpck_require__(6447)
-const errCode = __nccwpck_require__(2997)
-const dagCbor = __nccwpck_require__(6477)
+var hamtSharding = __nccwpck_require__(7820);
+var dagPb = __nccwpck_require__(8012);
+var murmur3 = __nccwpck_require__(6063);
-/**
- * @typedef {import('../types').Resolver} Resolver
- */
-
-/**
- * @type {Resolver}
- */
-const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
- const block = await blockstore.get(cid)
- const object = dagCbor.decode(block)
- let subObject = object
- let subPath = path
-
- while (toResolve.length) {
- const prop = toResolve[0]
-
- if (prop in subObject) {
- // remove the bit of the path we have resolved
- toResolve.shift()
- subPath = `${subPath}/${prop}`
-
- const subObjectCid = CID.asCID(subObject[prop])
- if (subObjectCid) {
- return {
- entry: {
- type: 'object',
- name,
- path,
- cid,
- node: block,
- depth,
- size: block.length,
- content: async function * () {
- yield object
- }
- },
- next: {
- cid: subObjectCid,
- name: prop,
- path: subPath,
- toResolve
- }
- }
- }
-
- subObject = subObject[prop]
- } else {
- // cannot resolve further
- throw errCode(new Error(`No property named ${prop} found in cbor node ${cid}`), 'ERR_NO_PROP')
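+// FIXME: this is copy/pasted from ipfs-unixfs-importer/src/options.js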
+const hashFn = async function (buf) {
+ return (await murmur3.murmur3128.encode(buf)).slice(0, 8).reverse();
+};
+const addLinksToHamtBucket = (links, bucket, rootBucket) => {
+ return Promise.all(links.map(link => {
+ if (link.Name == null) {
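+      // TODO(@rvagg): what do? this is technically possible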
+ throw new Error('Unexpected Link without a Name');
}
- }
-
- return {
- entry: {
- type: 'object',
- name,
- path,
- cid,
- node: block,
- depth,
- size: block.length,
- content: async function * () {
- yield object
- }
+ if (link.Name.length === 2) {
+ const pos = parseInt(link.Name, 16);
+ return bucket._putObjectAt(pos, new hamtSharding.Bucket({
+ hash: rootBucket._options.hash,
+ bits: rootBucket._options.bits
+ }, bucket, pos));
}
+ return rootBucket.put(link.Name.substring(2), true);
+ }));
+};
+const toPrefix = position => {
+ return position.toString(16).toUpperCase().padStart(2, '0').substring(0, 2);
+};
+const toBucketPath = position => {
+ let bucket = position.bucket;
+ const path = [];
+ while (bucket._parent) {
+ path.push(bucket);
+ bucket = bucket._parent;
}
-}
-
-module.exports = resolve
-
-
-/***/ }),
-
-/***/ 8522:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const errCode = __nccwpck_require__(2997)
-const extractDataFromBlock = __nccwpck_require__(2553)
-const validateOffsetAndLength = __nccwpck_require__(4178)
-const mh = __nccwpck_require__(76)
-
-/**
- * @typedef {import('../types').ExporterOptions} ExporterOptions
- * @typedef {import('../types').Resolver} Resolver
- */
-
-/**
- * @param {Uint8Array} node
- */
-const rawContent = (node) => {
- /**
- * @param {ExporterOptions} options
- */
- async function * contentGenerator (options = {}) {
- const {
- offset,
- length
- } = validateOffsetAndLength(node.length, options.offset, options.length)
-
- yield extractDataFromBlock(node, 0, offset, offset + length)
+ path.push(bucket);
+ return path.reverse();
+};
+const findShardCid = async (node, name, blockstore, context, options) => {
+ if (!context) {
+ const rootBucket = hamtSharding.createHAMT({ hashFn });
+ context = {
+ rootBucket,
+ hamtDepth: 1,
+ lastBucket: rootBucket
+ };
}
-
- return contentGenerator
-}
-
-/**
- * @type {Resolver}
- */
-const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
- if (toResolve.length) {
- throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND')
+ await addLinksToHamtBucket(node.Links, context.lastBucket, context.rootBucket);
+ const position = await context.rootBucket._findNewBucketAndPos(name);
+ let prefix = toPrefix(position.pos);
+ const bucketPath = toBucketPath(position);
+ if (bucketPath.length > context.hamtDepth) {
+ context.lastBucket = bucketPath[context.hamtDepth];
+ prefix = toPrefix(context.lastBucket._posAtParent);
}
- const buf = await mh.decode(cid.multihash.bytes)
-
- return {
- entry: {
- type: 'identity',
- name,
- path,
- cid,
- content: rawContent(buf.digest),
- depth,
- size: buf.digest.length,
- node: buf.digest
+ const link = node.Links.find(link => {
+ if (link.Name == null) {
+ return false;
+ }
+ const entryPrefix = link.Name.substring(0, 2);
+ const entryName = link.Name.substring(2);
+ if (entryPrefix !== prefix) {
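+      // not the entry or subshard we're looking for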
+ return false;
+ }
+ if (entryName && entryName !== name) {
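+      // not the entry we're looking for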
+ return false;
}
+ return true;
+ });
+ if (!link) {
+ return null;
}
-}
-
-module.exports = resolve
-
-
-/***/ }),
-
-/***/ 1120:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const errCode = __nccwpck_require__(2997)
-
-const dagPb = __nccwpck_require__(8012)
-const dagCbor = __nccwpck_require__(6477)
-const raw = __nccwpck_require__(2048)
-const { identity } = __nccwpck_require__(2379)
-
-/**
- * @typedef {import('../types').Resolver} Resolver
- * @typedef {import('../types').Resolve} Resolve
- */
-
-/**
- * @type {{ [ key: string ]: Resolver }}
- */
-const resolvers = {
- [dagPb.code]: __nccwpck_require__(1799),
- [raw.code]: __nccwpck_require__(8731),
- [dagCbor.code]: __nccwpck_require__(7049),
- [identity.code]: __nccwpck_require__(8522)
-}
-
-/**
- * @type {Resolve}
- */
-function resolve (cid, name, path, toResolve, depth, blockstore, options) {
- const resolver = resolvers[cid.code]
-
- if (!resolver) {
- throw errCode(new Error(`No resolver for code ${cid.code}`), 'ERR_NO_RESOLVER')
+ if (link.Name != null && link.Name.substring(2) === name) {
+ return link.Hash;
}
+ context.hamtDepth++;
+ const block = await blockstore.get(link.Hash, options);
+ node = dagPb.decode(block);
+ return findShardCid(node, name, blockstore, context, options);
+};
- return resolver(cid, name, path, toResolve, resolve, depth, blockstore, options)
-}
-
-module.exports = resolve
+module.exports = findShardCid;
/***/ }),
-/***/ 8731:
+/***/ 4287:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const errCode = __nccwpck_require__(2997)
-const extractDataFromBlock = __nccwpck_require__(2553)
-const validateOffsetAndLength = __nccwpck_require__(4178)
+var errCode = __nccwpck_require__(2997);
-/**
- * @typedef {import('../types').ExporterOptions} ExporterOptions
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-/**
- * @param {Uint8Array} node
- */
-const rawContent = (node) => {
- /**
- * @param {ExporterOptions} options
- */
- async function * contentGenerator (options = {}) {
- const {
- offset,
- length
- } = validateOffsetAndLength(node.length, options.offset, options.length)
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
- yield extractDataFromBlock(node, 0, offset, offset + length)
+const validateOffsetAndLength = (size, offset, length) => {
+ if (!offset) {
+ offset = 0;
}
-
- return contentGenerator
-}
-
-/**
- * @type {import('../types').Resolver}
- */
-const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
- if (toResolve.length) {
- throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND')
+ if (offset < 0) {
+ throw errCode__default['default'](new Error('Offset must be greater than or equal to 0'), 'ERR_INVALID_PARAMS');
}
-
- const block = await blockstore.get(cid, options)
-
- return {
- entry: {
- type: 'raw',
- name,
- path,
- cid,
- content: rawContent(block),
- depth,
- size: block.length,
- node: block
- }
+ if (offset > size) {
+ throw errCode__default['default'](new Error('Offset must be less than the file size'), 'ERR_INVALID_PARAMS');
}
-}
-
-module.exports = resolve
-
-
-/***/ }),
-
-/***/ 8703:
-/***/ ((module) => {
-
-"use strict";
-
-
-/**
- * @typedef {import('../../../types').ExporterOptions} ExporterOptions
- * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent
- * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver
- */
-
-/**
- * @type {UnixfsV1Resolver}
- */
-const directoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {
- /**
- * @param {ExporterOptions} [options]
- * @returns {UnixfsV1DirectoryContent}
- */
- async function * yieldDirectoryContent (options = {}) {
- const offset = options.offset || 0
- const length = options.length || node.Links.length
- const links = node.Links.slice(offset, length)
-
- for (const link of links) {
- const result = await resolve(link.Hash, link.Name || '', `${path}/${link.Name || ''}`, [], depth + 1, blockstore, options)
-
- if (result.entry) {
- yield result.entry
- }
- }
+ if (!length && length !== 0) {
+ length = size - offset;
}
+ if (length < 0) {
+ throw errCode__default['default'](new Error('Length must be greater than or equal to 0'), 'ERR_INVALID_PARAMS');
+ }
+ if (offset + length > size) {
+ length = size - offset;
+ }
+ return {
+ offset,
+ length
+ };
+};
- return yieldDirectoryContent
-}
-
-module.exports = directoryContent
+module.exports = validateOffsetAndLength;
/***/ }),
-/***/ 7373:
+/***/ 8452:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const extractDataFromBlock = __nccwpck_require__(2553)
-const validateOffsetAndLength = __nccwpck_require__(4178)
-const { UnixFS } = __nccwpck_require__(7381)
-const errCode = __nccwpck_require__(2997)
-const dagPb = __nccwpck_require__(8012)
-const dagCbor = __nccwpck_require__(6477)
-const raw = __nccwpck_require__(2048)
-
-/**
- * @typedef {import('../../../types').ExporterOptions} ExporterOptions
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('@ipld/dag-pb').PBNode} PBNode
- *
- * @param {Blockstore} blockstore
- * @param {PBNode} node
- * @param {number} start
- * @param {number} end
- * @param {number} streamPosition
- * @param {ExporterOptions} options
- * @returns {AsyncIterable<Uint8Array>}
- */
-async function * emitBytes (blockstore, node, start, end, streamPosition = 0, options) {
- // a `raw` node
- if (node instanceof Uint8Array) {
- const buf = extractDataFromBlock(node, streamPosition, start, end)
-
- if (buf.length) {
- yield buf
- }
-
- streamPosition += buf.length
-
- return streamPosition
- }
-
- if (node.Data == null) {
- throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS')
- }
-
- let file
-
- try {
- file = UnixFS.unmarshal(node.Data)
- } catch (err) {
- throw errCode(err, 'ERR_NOT_UNIXFS')
- }
-
- // might be a unixfs `raw` node or have data on intermediate nodes
- if (file.data && file.data.length) {
- const buf = extractDataFromBlock(file.data, streamPosition, start, end)
-
- if (buf.length) {
- yield buf
- }
-
- streamPosition += file.data.length
- }
-
- let childStart = streamPosition
+var BufferList = __nccwpck_require__(8386);
- // work out which child nodes contain the requested data
- for (let i = 0; i < node.Links.length; i++) {
- const childLink = node.Links[i]
- const childEnd = streamPosition + file.blockSizes[i]
-
- if ((start >= childStart && start < childEnd) || // child has offset byte
- (end > childStart && end <= childEnd) || // child has end byte
- (start < childStart && end > childEnd)) { // child is between offset and end bytes
- const block = await blockstore.get(childLink.Hash, {
- signal: options.signal
- })
- let child
- switch (childLink.Hash.code) {
- case dagPb.code:
- child = await dagPb.decode(block)
- break
- case raw.code:
- child = block
- break
- case dagCbor.code:
- child = await dagCbor.decode(block)
- break
- default:
- throw Error(`Unsupported codec: ${childLink.Hash.code}`)
- }
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- for await (const buf of emitBytes(blockstore, child, start, end, streamPosition, options)) {
- streamPosition += buf.length
-
- yield buf
- }
- }
-
- streamPosition = childEnd
- childStart = childEnd + 1
- }
-}
-
-/**
- * @type {import('../').UnixfsV1Resolver}
- */
-const fileContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {
- /**
- * @param {ExporterOptions} options
- */
- function yieldFileContent (options = {}) {
- const fileSize = unixfs.fileSize()
-
- if (fileSize === undefined) {
- throw new Error('File was a directory')
- }
-
- const {
- offset,
- length
- } = validateOffsetAndLength(fileSize, options.offset, options.length)
-
- const start = offset
- const end = offset + length
-
- return emitBytes(blockstore, node, start, end, 0, options)
- }
-
- return yieldFileContent
-}
-
-module.exports = fileContent
-
-
-/***/ }),
-
-/***/ 3353:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const { decode } = __nccwpck_require__(8012)
-
-/**
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('../../../types').ExporterOptions} ExporterOptions
- * @typedef {import('../../../types').Resolve} Resolve
- * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent
- * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver
- * @typedef {import('@ipld/dag-pb').PBNode} PBNode
- */
-
-/**
- * @type {UnixfsV1Resolver}
- */
-const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {
- /**
- * @param {ExporterOptions} options
- *
- */
- function yieldHamtDirectoryContent (options = {}) {
- return listDirectory(node, path, resolve, depth, blockstore, options)
- }
-
- return yieldHamtDirectoryContent
-}
-
-/**
- * @param {PBNode} node
- * @param {string} path
- * @param {Resolve} resolve
- * @param {number} depth
- * @param {Blockstore} blockstore
- * @param {ExporterOptions} options
- *
- * @returns {UnixfsV1DirectoryContent}
- */
-async function * listDirectory (node, path, resolve, depth, blockstore, options) {
- const links = node.Links
-
- for (const link of links) {
- const name = link.Name != null ? link.Name.substring(2) : null
-
- if (name) {
- const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockstore, options)
-
- yield result.entry
- } else {
- // descend into subshard
- const block = await blockstore.get(link.Hash)
- node = decode(block)
-
- for await (const file of listDirectory(node, path, resolve, depth, blockstore, options)) {
- yield file
- }
- }
- }
-}
-
-module.exports = hamtShardedDirectoryContent
-
-
-/***/ }),
-
-/***/ 1799:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const errCode = __nccwpck_require__(2997)
-const { UnixFS } = __nccwpck_require__(7381)
-const findShardCid = __nccwpck_require__(754)
-const { decode } = __nccwpck_require__(8012)
-
-/**
- * @typedef {import('../../types').Resolve} Resolve
- * @typedef {import('../../types').Resolver} Resolver
- * @typedef {import('../../types').UnixfsV1Resolver} UnixfsV1Resolver
- * @typedef {import('@ipld/dag-pb').PBNode} PBNode
- */
-
-/**
- * @param {PBNode} node
- * @param {string} name
- */
-const findLinkCid = (node, name) => {
- const link = node.Links.find(link => link.Name === name)
-
- return link && link.Hash
-}
-
-/**
- * @type {{ [key: string]: UnixfsV1Resolver }}
- */
-const contentExporters = {
- raw: __nccwpck_require__(7373),
- file: __nccwpck_require__(7373),
- directory: __nccwpck_require__(8703),
- 'hamt-sharded-directory': __nccwpck_require__(3353),
- metadata: (cid, node, unixfs, path, resolve, depth, blockstore) => {
- return () => []
- },
- symlink: (cid, node, unixfs, path, resolve, depth, blockstore) => {
- return () => []
- }
-}
-
-/**
- * @type {Resolver}
- */
-const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
- const block = await blockstore.get(cid, options)
- const node = decode(block)
- let unixfs
- let next
-
- if (!name) {
- name = cid.toString()
- }
-
- if (node.Data == null) {
- throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS')
- }
-
- try {
- unixfs = UnixFS.unmarshal(node.Data)
- } catch (err) {
- // non-UnixFS dag-pb node? It could happen.
- throw errCode(err, 'ERR_NOT_UNIXFS')
- }
-
- if (!path) {
- path = name
- }
-
- if (toResolve.length) {
- let linkCid
-
- if (unixfs && unixfs.type === 'hamt-sharded-directory') {
- // special case - unixfs v1 hamt shards
- linkCid = await findShardCid(node, toResolve[0], blockstore)
- } else {
- linkCid = findLinkCid(node, toResolve[0])
- }
-
- if (!linkCid) {
- throw errCode(new Error('file does not exist'), 'ERR_NOT_FOUND')
- }
-
- // remove the path component we have resolved
- const nextName = toResolve.shift()
- const nextPath = `${path}/${nextName}`
-
- next = {
- cid: linkCid,
- toResolve,
- name: nextName || '',
- path: nextPath
- }
- }
-
- return {
- entry: {
- type: unixfs.isDirectory() ? 'directory' : 'file',
- name,
- path,
- cid,
- // @ts-ignore
- content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockstore),
- unixfs,
- depth,
- node,
- size: unixfs.fileSize()
- },
- next
- }
-}
-
-module.exports = unixFsResolver
-
-
-/***/ }),
-
-/***/ 2553:
-/***/ ((module) => {
-
-"use strict";
-
-
-/**
- * @param {Uint8Array} block
- * @param {number} blockStart
- * @param {number} requestedStart
- * @param {number} requestedEnd
- */
-module.exports = function extractDataFromBlock (block, blockStart, requestedStart, requestedEnd) {
- const blockLength = block.length
- const blockEnd = blockStart + blockLength
-
- if (requestedStart >= blockEnd || requestedEnd < blockStart) {
-    // If we are looking for a byte range that starts after the end of the block
-    // or ends before its start, return an empty block. This can happen when internal nodes contain data
- return new Uint8Array(0)
- }
-
- if (requestedEnd >= blockStart && requestedEnd < blockEnd) {
- // If the end byte is in the current block, truncate the block to the end byte
- block = block.slice(0, requestedEnd - blockStart)
- }
-
- if (requestedStart >= blockStart && requestedStart < blockEnd) {
- // If the start byte is in the current block, skip to the start byte
- block = block.slice(requestedStart - blockStart)
- }
-
- return block
-}
-
-
-/***/ }),
-
-/***/ 754:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const { Bucket, createHAMT } = __nccwpck_require__(7820)
-const { decode } = __nccwpck_require__(8012)
-// @ts-ignore - no types available
-const mur = __nccwpck_require__(7214)
-const uint8ArrayFromString = __nccwpck_require__(828)
-
-/**
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('multiformats/cid').CID} CID
- * @typedef {import('../types').ExporterOptions} ExporterOptions
- * @typedef {import('@ipld/dag-pb').PBNode} PBNode
- * @typedef {import('@ipld/dag-pb').PBLink} PBLink
- */
-
-// FIXME: this is copy/pasted from ipfs-unixfs-importer/src/options.js
-/**
- * @param {Uint8Array} buf
- */
-const hashFn = async function (buf) {
- return uint8ArrayFromString(mur.x64.hash128(buf), 'base16').slice(0, 8).reverse()
-}
-
-/**
- * @param {PBLink[]} links
- * @param {Bucket} bucket
- * @param {Bucket} rootBucket
- */
-const addLinksToHamtBucket = (links, bucket, rootBucket) => {
- return Promise.all(
- links.map(link => {
- if (link.Name == null) {
- // TODO(@rvagg): what do? this is technically possible
- throw new Error('Unexpected Link without a Name')
- }
- if (link.Name.length === 2) {
- const pos = parseInt(link.Name, 16)
-
- return bucket._putObjectAt(pos, new Bucket({
- hash: rootBucket._options.hash,
- bits: rootBucket._options.bits
- }, bucket, pos))
- }
-
- return rootBucket.put(link.Name.substring(2), true)
- })
- )
-}
-
-/**
- * @param {number} position
- */
-const toPrefix = (position) => {
- return position
- .toString(16)
- .toUpperCase()
- .padStart(2, '0')
- .substring(0, 2)
-}
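// --- Editor's sketch: illustrative only, not part of the bundle. HAMT
// shard links are named `<two-hex-char prefix><entry name>`, and toPrefix
// renders a bucket position as that prefix:
toPrefix(10)  // -> '0A'
toPrefix(255) // -> 'FF'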
-
-/**
- * @param {import('hamt-sharding').Bucket.BucketPosition} position
- */
-const toBucketPath = (position) => {
- let bucket = position.bucket
- const path = []
-
- while (bucket._parent) {
- path.push(bucket)
-
- bucket = bucket._parent
- }
-
- path.push(bucket)
-
- return path.reverse()
-}
-
-/**
- * @typedef {object} ShardTraversalContext
- * @property {number} hamtDepth
- * @property {Bucket} rootBucket
- * @property {Bucket} lastBucket
- *
- * @param {PBNode} node
- * @param {string} name
- * @param {Blockstore} blockstore
- * @param {ShardTraversalContext} [context]
- * @param {ExporterOptions} [options]
- * @returns {Promise<CID | null>}
- */
-const findShardCid = async (node, name, blockstore, context, options) => {
- if (!context) {
- const rootBucket = createHAMT({
- hashFn
- })
-
- context = {
- rootBucket,
- hamtDepth: 1,
- lastBucket: rootBucket
- }
- }
-
- await addLinksToHamtBucket(node.Links, context.lastBucket, context.rootBucket)
-
- const position = await context.rootBucket._findNewBucketAndPos(name)
- let prefix = toPrefix(position.pos)
- const bucketPath = toBucketPath(position)
-
- if (bucketPath.length > context.hamtDepth) {
- context.lastBucket = bucketPath[context.hamtDepth]
-
- prefix = toPrefix(context.lastBucket._posAtParent)
- }
-
- const link = node.Links.find(link => {
- if (link.Name == null) {
- return false
- }
-
- const entryPrefix = link.Name.substring(0, 2)
- const entryName = link.Name.substring(2)
-
- if (entryPrefix !== prefix) {
- // not the entry or subshard we're looking for
- return false
- }
-
- if (entryName && entryName !== name) {
- // not the entry we're looking for
- return false
- }
-
- return true
- })
-
- if (!link) {
- return null
- }
-
- if (link.Name != null && link.Name.substring(2) === name) {
- return link.Hash
- }
-
- context.hamtDepth++
-
- const block = await blockstore.get(link.Hash, options)
- node = decode(block)
-
- return findShardCid(node, name, blockstore, context, options)
-}
-
-module.exports = findShardCid
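// Editor's note: findShardCid descends one shard level per call - the
// two-hex-char prefix selects a link, an exact name match returns that
// link's CID, and a bare prefix (a subshard) recurses with hamtDepth + 1.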
-
-
-/***/ }),
-
-/***/ 4178:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const errCode = __nccwpck_require__(2997)
-
-/**
- * @param {number} size
- * @param {number} [offset]
- * @param {number} [length]
- */
-const validateOffsetAndLength = (size, offset, length) => {
- if (!offset) {
- offset = 0
- }
-
- if (offset < 0) {
- throw errCode(new Error('Offset must be greater than or equal to 0'), 'ERR_INVALID_PARAMS')
- }
-
- if (offset > size) {
- throw errCode(new Error('Offset must be less than the file size'), 'ERR_INVALID_PARAMS')
- }
-
- if (!length && length !== 0) {
- length = size - offset
- }
-
- if (length < 0) {
- throw errCode(new Error('Length must be greater than or equal to 0'), 'ERR_INVALID_PARAMS')
- }
-
- if (offset + length > size) {
- length = size - offset
- }
-
- return {
- offset,
- length
- }
-}
-
-module.exports = validateOffsetAndLength
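// --- Editor's sketch: illustrative only, not part of the bundle. How the
// validator above normalises read ranges against a 100-byte file:
validateOffsetAndLength(100)         // -> { offset: 0, length: 100 }
validateOffsetAndLength(100, 90, 50) // -> { offset: 90, length: 10 } (clamped to EOF)
// validateOffsetAndLength(100, -1)  // would throw ERR_INVALID_PARAMS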
-
-
-/***/ }),
-
-/***/ 8386:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const { Buffer } = __nccwpck_require__(4293)
-const symbol = Symbol.for('BufferList')
-
-function BufferList (buf) {
- if (!(this instanceof BufferList)) {
- return new BufferList(buf)
- }
-
- BufferList._init.call(this, buf)
-}
-
-BufferList._init = function _init (buf) {
- Object.defineProperty(this, symbol, { value: true })
-
- this._bufs = []
- this.length = 0
-
- if (buf) {
- this.append(buf)
- }
-}
-
-BufferList.prototype._new = function _new (buf) {
- return new BufferList(buf)
-}
-
-BufferList.prototype._offset = function _offset (offset) {
- if (offset === 0) {
- return [0, 0]
- }
-
- let tot = 0
-
- for (let i = 0; i < this._bufs.length; i++) {
- const _t = tot + this._bufs[i].length
- if (offset < _t || i === this._bufs.length - 1) {
- return [i, offset - tot]
- }
- tot = _t
- }
-}
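// Editor's note: _offset maps a global byte index to a
// [bufferIndex, offsetWithinBuffer] pair; with internal buffer lengths
// [4, 6], index 3 -> [0, 3], index 4 -> [1, 0] and index 9 -> [1, 5].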
-
-BufferList.prototype._reverseOffset = function (blOffset) {
- const bufferId = blOffset[0]
- let offset = blOffset[1]
-
- for (let i = 0; i < bufferId; i++) {
- offset += this._bufs[i].length
- }
-
- return offset
-}
-
-BufferList.prototype.get = function get (index) {
- if (index > this.length || index < 0) {
- return undefined
- }
-
- const offset = this._offset(index)
-
- return this._bufs[offset[0]][offset[1]]
-}
-
-BufferList.prototype.slice = function slice (start, end) {
- if (typeof start === 'number' && start < 0) {
- start += this.length
- }
-
- if (typeof end === 'number' && end < 0) {
- end += this.length
- }
-
- return this.copy(null, 0, start, end)
-}
-
-BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
- if (typeof srcStart !== 'number' || srcStart < 0) {
- srcStart = 0
- }
-
- if (typeof srcEnd !== 'number' || srcEnd > this.length) {
- srcEnd = this.length
- }
-
- if (srcStart >= this.length) {
- return dst || Buffer.alloc(0)
- }
-
- if (srcEnd <= 0) {
- return dst || Buffer.alloc(0)
- }
-
- const copy = !!dst
- const off = this._offset(srcStart)
- const len = srcEnd - srcStart
- let bytes = len
- let bufoff = (copy && dstStart) || 0
- let start = off[1]
-
- // copy/slice everything
- if (srcStart === 0 && srcEnd === this.length) {
- if (!copy) {
- // slice, but full concat if multiple buffers
- return this._bufs.length === 1
- ? this._bufs[0]
- : Buffer.concat(this._bufs, this.length)
- }
-
- // copy, need to copy individual buffers
- for (let i = 0; i < this._bufs.length; i++) {
- this._bufs[i].copy(dst, bufoff)
- bufoff += this._bufs[i].length
- }
-
- return dst
- }
-
- // easy, cheap case where it's a subset of one of the buffers
- if (bytes <= this._bufs[off[0]].length - start) {
- return copy
- ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
- : this._bufs[off[0]].slice(start, start + bytes)
- }
-
- if (!copy) {
-    // a slice, we need something to copy into
- dst = Buffer.allocUnsafe(len)
- }
-
- for (let i = off[0]; i < this._bufs.length; i++) {
- const l = this._bufs[i].length - start
-
- if (bytes > l) {
- this._bufs[i].copy(dst, bufoff, start)
- bufoff += l
- } else {
- this._bufs[i].copy(dst, bufoff, start, start + bytes)
- bufoff += l
- break
- }
-
- bytes -= l
-
- if (start) {
- start = 0
- }
- }
-
- // safeguard so that we don't return uninitialized memory
- if (dst.length > bufoff) return dst.slice(0, bufoff)
-
- return dst
-}
-
-BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
- start = start || 0
- end = typeof end !== 'number' ? this.length : end
-
- if (start < 0) {
- start += this.length
- }
-
- if (end < 0) {
- end += this.length
- }
-
- if (start === end) {
- return this._new()
- }
-
- const startOffset = this._offset(start)
- const endOffset = this._offset(end)
- const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
-
- if (endOffset[1] === 0) {
- buffers.pop()
- } else {
- buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1])
- }
-
- if (startOffset[1] !== 0) {
- buffers[0] = buffers[0].slice(startOffset[1])
- }
-
- return this._new(buffers)
-}
-
-BufferList.prototype.toString = function toString (encoding, start, end) {
- return this.slice(start, end).toString(encoding)
-}
-
-BufferList.prototype.consume = function consume (bytes) {
- // first, normalize the argument, in accordance with how Buffer does it
- bytes = Math.trunc(bytes)
- // do nothing if not a positive number
- if (Number.isNaN(bytes) || bytes <= 0) return this
-
- while (this._bufs.length) {
- if (bytes >= this._bufs[0].length) {
- bytes -= this._bufs[0].length
- this.length -= this._bufs[0].length
- this._bufs.shift()
- } else {
- this._bufs[0] = this._bufs[0].slice(bytes)
- this.length -= bytes
- break
- }
- }
-
- return this
-}
-
-BufferList.prototype.duplicate = function duplicate () {
- const copy = this._new()
-
- for (let i = 0; i < this._bufs.length; i++) {
- copy.append(this._bufs[i])
- }
-
- return copy
-}
-
-BufferList.prototype.append = function append (buf) {
- if (buf == null) {
- return this
- }
-
- if (buf.buffer) {
- // append a view of the underlying ArrayBuffer
- this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength))
- } else if (Array.isArray(buf)) {
- for (let i = 0; i < buf.length; i++) {
- this.append(buf[i])
- }
- } else if (this._isBufferList(buf)) {
-    // unwrap the BufferList and append its internal buffers individually
- for (let i = 0; i < buf._bufs.length; i++) {
- this.append(buf._bufs[i])
- }
- } else {
- // coerce number arguments to strings, since Buffer(number) does
- // uninitialized memory allocation
- if (typeof buf === 'number') {
- buf = buf.toString()
- }
-
- this._appendBuffer(Buffer.from(buf))
- }
-
- return this
-}
-
-BufferList.prototype._appendBuffer = function appendBuffer (buf) {
- this._bufs.push(buf)
- this.length += buf.length
-}
-
-BufferList.prototype.indexOf = function (search, offset, encoding) {
- if (encoding === undefined && typeof offset === 'string') {
- encoding = offset
- offset = undefined
- }
-
- if (typeof search === 'function' || Array.isArray(search)) {
- throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
- } else if (typeof search === 'number') {
- search = Buffer.from([search])
- } else if (typeof search === 'string') {
- search = Buffer.from(search, encoding)
- } else if (this._isBufferList(search)) {
- search = search.slice()
- } else if (Array.isArray(search.buffer)) {
- search = Buffer.from(search.buffer, search.byteOffset, search.byteLength)
- } else if (!Buffer.isBuffer(search)) {
- search = Buffer.from(search)
- }
-
- offset = Number(offset || 0)
-
- if (isNaN(offset)) {
- offset = 0
- }
-
- if (offset < 0) {
- offset = this.length + offset
- }
-
- if (offset < 0) {
- offset = 0
- }
-
- if (search.length === 0) {
- return offset > this.length ? this.length : offset
- }
-
- const blOffset = this._offset(offset)
- let blIndex = blOffset[0] // index of which internal buffer we're working on
- let buffOffset = blOffset[1] // offset of the internal buffer we're working on
-
- // scan over each buffer
- for (; blIndex < this._bufs.length; blIndex++) {
- const buff = this._bufs[blIndex]
-
- while (buffOffset < buff.length) {
- const availableWindow = buff.length - buffOffset
-
- if (availableWindow >= search.length) {
- const nativeSearchResult = buff.indexOf(search, buffOffset)
-
- if (nativeSearchResult !== -1) {
- return this._reverseOffset([blIndex, nativeSearchResult])
- }
-
- buffOffset = buff.length - search.length + 1 // end of native search window
- } else {
- const revOffset = this._reverseOffset([blIndex, buffOffset])
-
- if (this._match(revOffset, search)) {
- return revOffset
- }
-
- buffOffset++
- }
- }
-
- buffOffset = 0
- }
-
- return -1
-}
-
-BufferList.prototype._match = function (offset, search) {
- if (this.length - offset < search.length) {
- return false
- }
-
- for (let searchOffset = 0; searchOffset < search.length; searchOffset++) {
- if (this.get(offset + searchOffset) !== search[searchOffset]) {
- return false
- }
- }
- return true
-}
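// --- Editor's sketch: illustrative only, not part of the bundle; the
// `blDemo` name is ours. A match can straddle two internal buffers, where
// a single native Buffer#indexOf cannot see it; the byte-by-byte _match()
// fallback above covers exactly that case:
const blDemo = new BufferList([Buffer.from('ab'), Buffer.from('cd')])
blDemo.indexOf('bc') // -> 1 (spans the internal buffer boundary)
blDemo.indexOf('cd') // -> 2 (found by native Buffer#indexOf)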
-
-;(function () {
- const methods = {
- readDoubleBE: 8,
- readDoubleLE: 8,
- readFloatBE: 4,
- readFloatLE: 4,
- readInt32BE: 4,
- readInt32LE: 4,
- readUInt32BE: 4,
- readUInt32LE: 4,
- readInt16BE: 2,
- readInt16LE: 2,
- readUInt16BE: 2,
- readUInt16LE: 2,
- readInt8: 1,
- readUInt8: 1,
- readIntBE: null,
- readIntLE: null,
- readUIntBE: null,
- readUIntLE: null
- }
-
- for (const m in methods) {
- (function (m) {
- if (methods[m] === null) {
- BufferList.prototype[m] = function (offset, byteLength) {
- return this.slice(offset, offset + byteLength)[m](0, byteLength)
- }
- } else {
- BufferList.prototype[m] = function (offset = 0) {
- return this.slice(offset, offset + methods[m])[m](0)
- }
- }
- }(m))
- }
-}())
-
-// Used internally by the class and also as an indicator of this object being
-// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser
-// environment because there could be multiple different copies of the
-// BufferList class and some `BufferList`s might be instances of another copy.
-BufferList.prototype._isBufferList = function _isBufferList (b) {
- return b instanceof BufferList || BufferList.isBufferList(b)
-}
-
-BufferList.isBufferList = function isBufferList (b) {
- return b != null && b[symbol]
-}
-
-module.exports = BufferList
-
-
-/***/ }),
-
-/***/ 4171:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const {
- Data: PBData
-} = __nccwpck_require__(8262)
-const errcode = __nccwpck_require__(2997)
-
-/**
- * @typedef {import('./types').Mtime} Mtime
- * @typedef {import('./types').MtimeLike} MtimeLike
- */
-
-const types = [
- 'raw',
- 'directory',
- 'file',
- 'metadata',
- 'symlink',
- 'hamt-sharded-directory'
-]
-
-const dirTypes = [
- 'directory',
- 'hamt-sharded-directory'
-]
-
-const DEFAULT_FILE_MODE = parseInt('0644', 8)
-const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8)
-
-/**
- * @param {string | number | undefined} [mode]
- */
-function parseMode (mode) {
- if (mode == null) {
- return undefined
- }
-
- if (typeof mode === 'number') {
- return mode & 0xFFF
- }
-
- mode = mode.toString()
-
- if (mode.substring(0, 1) === '0') {
- // octal string
- return parseInt(mode, 8) & 0xFFF
- }
-
- // decimal string
- return parseInt(mode, 10) & 0xFFF
-}
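// --- Editor's sketch: illustrative only, not part of the bundle. parseMode
// accepts octal strings, decimal strings and numbers, always masking to the
// low 12 permission bits:
parseMode('0755')    // -> 493 (leading '0' means octal)
parseMode('755')     // -> 755 (no leading '0', parsed as decimal)
parseMode(0o755)     // -> 493
parseMode(undefined) // -> undefined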
-
-/**
- * @param {any} input
- */
-function parseMtime (input) {
- if (input == null) {
- return undefined
- }
-
- /** @type {Mtime | undefined} */
- let mtime
-
- // { secs, nsecs }
- if (input.secs != null) {
- mtime = {
- secs: input.secs,
- nsecs: input.nsecs
- }
- }
-
- // UnixFS TimeSpec
- if (input.Seconds != null) {
- mtime = {
- secs: input.Seconds,
- nsecs: input.FractionalNanoseconds
- }
- }
-
- // process.hrtime()
- if (Array.isArray(input)) {
- mtime = {
- secs: input[0],
- nsecs: input[1]
- }
- }
-
- // Javascript Date
- if (input instanceof Date) {
- const ms = input.getTime()
- const secs = Math.floor(ms / 1000)
-
- mtime = {
- secs: secs,
- nsecs: (ms - (secs * 1000)) * 1000
- }
- }
-
- /*
- TODO: https://github.com/ipfs/aegir/issues/487
-
- // process.hrtime.bigint()
- if (input instanceof BigInt) {
- const secs = input / BigInt(1e9)
- const nsecs = input - (secs * BigInt(1e9))
-
- mtime = {
- secs: parseInt(secs.toString()),
- nsecs: parseInt(nsecs.toString())
- }
- }
- */
-
-  if (mtime == null || !Object.prototype.hasOwnProperty.call(mtime, 'secs')) {
- return undefined
- }
-
- if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) {
- throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS')
- }
-
- return mtime
-}
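// --- Editor's sketch: illustrative only, not part of the bundle. Each
// accepted mtime shape normalises to the same { secs, nsecs } object:
parseMtime(new Date(1000500))             // -> { secs: 1000, nsecs: 500000 }
parseMtime([1000, 500000])                // -> { secs: 1000, nsecs: 500000 } (process.hrtime() pair)
parseMtime({ secs: 1000, nsecs: 500000 }) // -> { secs: 1000, nsecs: 500000 }
// parseMtime({ secs: 1, nsecs: 1e10 })   // would throw ERR_INVALID_MTIME_NSECS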
-
-class Data {
- /**
- * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md
- *
- * @param {Uint8Array} marshaled
- */
- static unmarshal (marshaled) {
- const message = PBData.decode(marshaled)
- const decoded = PBData.toObject(message, {
- defaults: false,
- arrays: true,
- longs: Number,
- objects: false
- })
-
- const data = new Data({
- type: types[decoded.Type],
- data: decoded.Data,
- blockSizes: decoded.blocksizes,
- mode: decoded.mode,
- mtime: decoded.mtime
- ? {
- secs: decoded.mtime.Seconds,
- nsecs: decoded.mtime.FractionalNanoseconds
- }
- : undefined
- })
-
- // make sure we honour the original mode
- data._originalMode = decoded.mode || 0
-
- return data
- }
-
- /**
- * @param {object} [options]
- * @param {string} [options.type='file']
- * @param {Uint8Array} [options.data]
- * @param {number[]} [options.blockSizes]
- * @param {number} [options.hashType]
- * @param {number} [options.fanout]
- * @param {MtimeLike | null} [options.mtime]
- * @param {number | string} [options.mode]
- */
- constructor (options = {
- type: 'file'
- }) {
- const {
- type,
- data,
- blockSizes,
- hashType,
- fanout,
- mtime,
- mode
- } = options
-
- if (type && !types.includes(type)) {
- throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')
- }
-
- this.type = type || 'file'
- this.data = data
- this.hashType = hashType
- this.fanout = fanout
-
- /** @type {number[]} */
- this.blockSizes = blockSizes || []
- this._originalMode = 0
- this.mode = parseMode(mode)
-
- if (mtime) {
- this.mtime = parseMtime(mtime)
-
- if (this.mtime && !this.mtime.nsecs) {
- this.mtime.nsecs = 0
- }
- }
- }
-
- /**
- * @param {number | undefined} mode
- */
- set mode (mode) {
- this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE
-
- const parsedMode = parseMode(mode)
-
- if (parsedMode !== undefined) {
- this._mode = parsedMode
- }
- }
-
- /**
- * @returns {number | undefined}
- */
- get mode () {
- return this._mode
- }
-
- isDirectory () {
- return Boolean(this.type && dirTypes.includes(this.type))
- }
-
- /**
- * @param {number} size
- */
- addBlockSize (size) {
- this.blockSizes.push(size)
- }
-
- /**
- * @param {number} index
- */
- removeBlockSize (index) {
- this.blockSizes.splice(index, 1)
- }
-
- /**
- * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else
- */
- fileSize () {
- if (this.isDirectory()) {
- // dirs don't have file size
- return 0
- }
-
- let sum = 0
- this.blockSizes.forEach((size) => {
- sum += size
- })
-
- if (this.data) {
- sum += this.data.length
- }
-
- return sum
- }
-
- /**
- * encode to protobuf Uint8Array
- */
- marshal () {
- let type
-
- switch (this.type) {
- case 'raw': type = PBData.DataType.Raw; break
- case 'directory': type = PBData.DataType.Directory; break
- case 'file': type = PBData.DataType.File; break
- case 'metadata': type = PBData.DataType.Metadata; break
- case 'symlink': type = PBData.DataType.Symlink; break
- case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break
- default:
- throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')
- }
-
- let data = this.data
-
- if (!this.data || !this.data.length) {
- data = undefined
- }
-
- let mode
-
- if (this.mode != null) {
- mode = (this._originalMode & 0xFFFFF000) | (parseMode(this.mode) || 0)
-
- if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {
- mode = undefined
- }
-
- if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {
- mode = undefined
- }
- }
-
- let mtime
-
- if (this.mtime != null) {
- const parsed = parseMtime(this.mtime)
-
- if (parsed) {
- mtime = {
- Seconds: parsed.secs,
- FractionalNanoseconds: parsed.nsecs
- }
-
- if (mtime.FractionalNanoseconds === 0) {
- delete mtime.FractionalNanoseconds
- }
- }
- }
-
- const pbData = {
- Type: type,
- Data: data,
- filesize: this.isDirectory() ? undefined : this.fileSize(),
- blocksizes: this.blockSizes,
- hashType: this.hashType,
- fanout: this.fanout,
- mode,
- mtime
- }
-
- return PBData.encode(pbData).finish()
- }
-}
-
-module.exports = {
- UnixFS: Data,
- parseMode,
- parseMtime
-}
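// --- Editor's sketch: illustrative only, not part of the bundle; the demo
// names are ours. A marshal/unmarshal round trip through the Data class
// above (published standalone as `ipfs-unixfs`):
const demoEntry = new Data({ type: 'file', data: Uint8Array.from([1, 2, 3]) })
const demoRestored = Data.unmarshal(demoEntry.marshal())
demoRestored.type       // -> 'file'
demoRestored.fileSize() // -> 3 (data.length, since there are no blockSizes)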
-
-
-/***/ }),
-
-/***/ 8262:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-/*eslint-disable*/
-
-
-var $protobuf = __nccwpck_require__(6916);
-
-// Common aliases
-var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
-
-// Exported root namespace
-var $root = $protobuf.roots["ipfs-unixfs"] || ($protobuf.roots["ipfs-unixfs"] = {});
-
-$root.Data = (function() {
-
- /**
- * Properties of a Data.
- * @exports IData
- * @interface IData
- * @property {Data.DataType} Type Data Type
- * @property {Uint8Array|null} [Data] Data Data
- * @property {number|null} [filesize] Data filesize
- * @property {Array.<number>|null} [blocksizes] Data blocksizes
- * @property {number|null} [hashType] Data hashType
- * @property {number|null} [fanout] Data fanout
- * @property {number|null} [mode] Data mode
- * @property {IUnixTime|null} [mtime] Data mtime
- */
-
- /**
- * Constructs a new Data.
- * @exports Data
- * @classdesc Represents a Data.
- * @implements IData
- * @constructor
- * @param {IData=} [p] Properties to set
- */
- function Data(p) {
- this.blocksizes = [];
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
- }
-
- /**
- * Data Type.
- * @member {Data.DataType} Type
- * @memberof Data
- * @instance
- */
- Data.prototype.Type = 0;
-
- /**
- * Data Data.
- * @member {Uint8Array} Data
- * @memberof Data
- * @instance
- */
- Data.prototype.Data = $util.newBuffer([]);
-
- /**
- * Data filesize.
- * @member {number} filesize
- * @memberof Data
- * @instance
- */
- Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
-
- /**
- * Data blocksizes.
- * @member {Array.<number>} blocksizes
- * @memberof Data
- * @instance
- */
- Data.prototype.blocksizes = $util.emptyArray;
-
- /**
- * Data hashType.
- * @member {number} hashType
- * @memberof Data
- * @instance
- */
- Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
-
- /**
- * Data fanout.
- * @member {number} fanout
- * @memberof Data
- * @instance
- */
- Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
-
- /**
- * Data mode.
- * @member {number} mode
- * @memberof Data
- * @instance
- */
- Data.prototype.mode = 0;
-
- /**
- * Data mtime.
- * @member {IUnixTime|null|undefined} mtime
- * @memberof Data
- * @instance
- */
- Data.prototype.mtime = null;
-
- /**
- * Encodes the specified Data message. Does not implicitly {@link Data.verify|verify} messages.
- * @function encode
- * @memberof Data
- * @static
- * @param {IData} m Data message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- Data.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- w.uint32(8).int32(m.Type);
- if (m.Data != null && Object.hasOwnProperty.call(m, "Data"))
- w.uint32(18).bytes(m.Data);
- if (m.filesize != null && Object.hasOwnProperty.call(m, "filesize"))
- w.uint32(24).uint64(m.filesize);
- if (m.blocksizes != null && m.blocksizes.length) {
- for (var i = 0; i < m.blocksizes.length; ++i)
- w.uint32(32).uint64(m.blocksizes[i]);
- }
- if (m.hashType != null && Object.hasOwnProperty.call(m, "hashType"))
- w.uint32(40).uint64(m.hashType);
- if (m.fanout != null && Object.hasOwnProperty.call(m, "fanout"))
- w.uint32(48).uint64(m.fanout);
- if (m.mode != null && Object.hasOwnProperty.call(m, "mode"))
- w.uint32(56).uint32(m.mode);
- if (m.mtime != null && Object.hasOwnProperty.call(m, "mtime"))
- $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim();
- return w;
- };
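// Editor's note: the literal tag bytes above follow the protobuf wire
// format, (fieldNumber << 3) | wireType - e.g. 8 is field 1 as a varint,
// 18 is field 2 length-delimited, and 66 is field 8 length-delimited.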
-
- /**
- * Decodes a Data message from the specified reader or buffer.
- * @function decode
- * @memberof Data
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {Data} Data
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- Data.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.Data();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.Type = r.int32();
- break;
- case 2:
- m.Data = r.bytes();
- break;
- case 3:
- m.filesize = r.uint64();
- break;
- case 4:
- if (!(m.blocksizes && m.blocksizes.length))
- m.blocksizes = [];
- if ((t & 7) === 2) {
- var c2 = r.uint32() + r.pos;
- while (r.pos < c2)
- m.blocksizes.push(r.uint64());
- } else
- m.blocksizes.push(r.uint64());
- break;
- case 5:
- m.hashType = r.uint64();
- break;
- case 6:
- m.fanout = r.uint64();
- break;
- case 7:
- m.mode = r.uint32();
- break;
- case 8:
- m.mtime = $root.UnixTime.decode(r, r.uint32());
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- if (!m.hasOwnProperty("Type"))
- throw $util.ProtocolError("missing required 'Type'", { instance: m });
- return m;
- };
-
- /**
- * Creates a Data message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof Data
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {Data} Data
- */
- Data.fromObject = function fromObject(d) {
- if (d instanceof $root.Data)
- return d;
- var m = new $root.Data();
- switch (d.Type) {
- case "Raw":
- case 0:
- m.Type = 0;
- break;
- case "Directory":
- case 1:
- m.Type = 1;
- break;
- case "File":
- case 2:
- m.Type = 2;
- break;
- case "Metadata":
- case 3:
- m.Type = 3;
- break;
- case "Symlink":
- case 4:
- m.Type = 4;
- break;
- case "HAMTShard":
- case 5:
- m.Type = 5;
- break;
- }
- if (d.Data != null) {
- if (typeof d.Data === "string")
- $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0);
- else if (d.Data.length)
- m.Data = d.Data;
- }
- if (d.filesize != null) {
- if ($util.Long)
- (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true;
- else if (typeof d.filesize === "string")
- m.filesize = parseInt(d.filesize, 10);
- else if (typeof d.filesize === "number")
- m.filesize = d.filesize;
- else if (typeof d.filesize === "object")
- m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true);
- }
- if (d.blocksizes) {
- if (!Array.isArray(d.blocksizes))
- throw TypeError(".Data.blocksizes: array expected");
- m.blocksizes = [];
- for (var i = 0; i < d.blocksizes.length; ++i) {
- if ($util.Long)
- (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true;
- else if (typeof d.blocksizes[i] === "string")
- m.blocksizes[i] = parseInt(d.blocksizes[i], 10);
- else if (typeof d.blocksizes[i] === "number")
- m.blocksizes[i] = d.blocksizes[i];
- else if (typeof d.blocksizes[i] === "object")
- m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true);
- }
- }
- if (d.hashType != null) {
- if ($util.Long)
- (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true;
- else if (typeof d.hashType === "string")
- m.hashType = parseInt(d.hashType, 10);
- else if (typeof d.hashType === "number")
- m.hashType = d.hashType;
- else if (typeof d.hashType === "object")
- m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true);
- }
- if (d.fanout != null) {
- if ($util.Long)
- (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true;
- else if (typeof d.fanout === "string")
- m.fanout = parseInt(d.fanout, 10);
- else if (typeof d.fanout === "number")
- m.fanout = d.fanout;
- else if (typeof d.fanout === "object")
- m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true);
- }
- if (d.mode != null) {
- m.mode = d.mode >>> 0;
- }
- if (d.mtime != null) {
- if (typeof d.mtime !== "object")
- throw TypeError(".Data.mtime: object expected");
- m.mtime = $root.UnixTime.fromObject(d.mtime);
- }
- return m;
- };
-
- /**
- * Creates a plain object from a Data message. Also converts values to other types if specified.
- * @function toObject
- * @memberof Data
- * @static
- * @param {Data} m Data
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.} Plain object
- */
- Data.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.arrays || o.defaults) {
- d.blocksizes = [];
- }
- if (o.defaults) {
- d.Type = o.enums === String ? "Raw" : 0;
- if (o.bytes === String)
- d.Data = "";
- else {
- d.Data = [];
- if (o.bytes !== Array)
- d.Data = $util.newBuffer(d.Data);
- }
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.filesize = o.longs === String ? "0" : 0;
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.hashType = o.longs === String ? "0" : 0;
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.fanout = o.longs === String ? "0" : 0;
- d.mode = 0;
- d.mtime = null;
- }
- if (m.Type != null && m.hasOwnProperty("Type")) {
- d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type;
- }
- if (m.Data != null && m.hasOwnProperty("Data")) {
- d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data;
- }
- if (m.filesize != null && m.hasOwnProperty("filesize")) {
- if (typeof m.filesize === "number")
- d.filesize = o.longs === String ? String(m.filesize) : m.filesize;
- else
- d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize;
- }
- if (m.blocksizes && m.blocksizes.length) {
- d.blocksizes = [];
- for (var j = 0; j < m.blocksizes.length; ++j) {
- if (typeof m.blocksizes[j] === "number")
- d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j];
- else
- d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j];
- }
- }
- if (m.hashType != null && m.hasOwnProperty("hashType")) {
- if (typeof m.hashType === "number")
- d.hashType = o.longs === String ? String(m.hashType) : m.hashType;
- else
- d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType;
- }
- if (m.fanout != null && m.hasOwnProperty("fanout")) {
- if (typeof m.fanout === "number")
- d.fanout = o.longs === String ? String(m.fanout) : m.fanout;
- else
- d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout;
- }
- if (m.mode != null && m.hasOwnProperty("mode")) {
- d.mode = m.mode;
- }
- if (m.mtime != null && m.hasOwnProperty("mtime")) {
- d.mtime = $root.UnixTime.toObject(m.mtime, o);
- }
- return d;
- };
-
- /**
- * Converts this Data to JSON.
- * @function toJSON
- * @memberof Data
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- Data.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
-
- /**
- * DataType enum.
- * @name Data.DataType
- * @enum {number}
- * @property {number} Raw=0 Raw value
- * @property {number} Directory=1 Directory value
- * @property {number} File=2 File value
- * @property {number} Metadata=3 Metadata value
- * @property {number} Symlink=4 Symlink value
- * @property {number} HAMTShard=5 HAMTShard value
- */
- Data.DataType = (function() {
- var valuesById = {}, values = Object.create(valuesById);
- values[valuesById[0] = "Raw"] = 0;
- values[valuesById[1] = "Directory"] = 1;
- values[valuesById[2] = "File"] = 2;
- values[valuesById[3] = "Metadata"] = 3;
- values[valuesById[4] = "Symlink"] = 4;
- values[valuesById[5] = "HAMTShard"] = 5;
- return values;
- })();
-
- return Data;
-})();
-
-$root.UnixTime = (function() {
-
- /**
- * Properties of an UnixTime.
- * @exports IUnixTime
- * @interface IUnixTime
- * @property {number} Seconds UnixTime Seconds
- * @property {number|null} [FractionalNanoseconds] UnixTime FractionalNanoseconds
- */
-
- /**
- * Constructs a new UnixTime.
- * @exports UnixTime
- * @classdesc Represents an UnixTime.
- * @implements IUnixTime
- * @constructor
- * @param {IUnixTime=} [p] Properties to set
- */
- function UnixTime(p) {
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
- }
-
- /**
- * UnixTime Seconds.
- * @member {number} Seconds
- * @memberof UnixTime
- * @instance
- */
- UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0;
-
- /**
- * UnixTime FractionalNanoseconds.
- * @member {number} FractionalNanoseconds
- * @memberof UnixTime
- * @instance
- */
- UnixTime.prototype.FractionalNanoseconds = 0;
-
- /**
- * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages.
- * @function encode
- * @memberof UnixTime
- * @static
- * @param {IUnixTime} m UnixTime message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- UnixTime.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- w.uint32(8).int64(m.Seconds);
- if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, "FractionalNanoseconds"))
- w.uint32(21).fixed32(m.FractionalNanoseconds);
- return w;
- };
-
- /**
- * Decodes an UnixTime message from the specified reader or buffer.
- * @function decode
- * @memberof UnixTime
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {UnixTime} UnixTime
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- UnixTime.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.UnixTime();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.Seconds = r.int64();
- break;
- case 2:
- m.FractionalNanoseconds = r.fixed32();
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- if (!m.hasOwnProperty("Seconds"))
- throw $util.ProtocolError("missing required 'Seconds'", { instance: m });
- return m;
- };
-
- /**
- * Creates an UnixTime message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof UnixTime
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {UnixTime} UnixTime
- */
- UnixTime.fromObject = function fromObject(d) {
- if (d instanceof $root.UnixTime)
- return d;
- var m = new $root.UnixTime();
- if (d.Seconds != null) {
- if ($util.Long)
- (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false;
- else if (typeof d.Seconds === "string")
- m.Seconds = parseInt(d.Seconds, 10);
- else if (typeof d.Seconds === "number")
- m.Seconds = d.Seconds;
- else if (typeof d.Seconds === "object")
- m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber();
- }
- if (d.FractionalNanoseconds != null) {
- m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0;
- }
- return m;
- };
-
- /**
- * Creates a plain object from an UnixTime message. Also converts values to other types if specified.
- * @function toObject
- * @memberof UnixTime
- * @static
- * @param {UnixTime} m UnixTime
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- UnixTime.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.defaults) {
- if ($util.Long) {
- var n = new $util.Long(0, 0, false);
- d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.Seconds = o.longs === String ? "0" : 0;
- d.FractionalNanoseconds = 0;
- }
- if (m.Seconds != null && m.hasOwnProperty("Seconds")) {
- if (typeof m.Seconds === "number")
- d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds;
- else
- d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds;
- }
- if (m.FractionalNanoseconds != null && m.hasOwnProperty("FractionalNanoseconds")) {
- d.FractionalNanoseconds = m.FractionalNanoseconds;
- }
- return d;
- };
-
- /**
- * Converts this UnixTime to JSON.
- * @function toJSON
- * @memberof UnixTime
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- UnixTime.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
-
- return UnixTime;
-})();
-
-$root.Metadata = (function() {
-
- /**
- * Properties of a Metadata.
- * @exports IMetadata
- * @interface IMetadata
- * @property {string|null} [MimeType] Metadata MimeType
- */
-
- /**
- * Constructs a new Metadata.
- * @exports Metadata
- * @classdesc Represents a Metadata.
- * @implements IMetadata
- * @constructor
- * @param {IMetadata=} [p] Properties to set
- */
- function Metadata(p) {
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
- }
-
- /**
- * Metadata MimeType.
- * @member {string} MimeType
- * @memberof Metadata
- * @instance
- */
- Metadata.prototype.MimeType = "";
-
- /**
- * Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages.
- * @function encode
- * @memberof Metadata
- * @static
- * @param {IMetadata} m Metadata message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- Metadata.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- if (m.MimeType != null && Object.hasOwnProperty.call(m, "MimeType"))
- w.uint32(10).string(m.MimeType);
- return w;
- };
-
- /**
- * Decodes a Metadata message from the specified reader or buffer.
- * @function decode
- * @memberof Metadata
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {Metadata} Metadata
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- Metadata.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.MimeType = r.string();
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- return m;
- };
-
- /**
- * Creates a Metadata message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof Metadata
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {Metadata} Metadata
- */
- Metadata.fromObject = function fromObject(d) {
- if (d instanceof $root.Metadata)
- return d;
- var m = new $root.Metadata();
- if (d.MimeType != null) {
- m.MimeType = String(d.MimeType);
- }
- return m;
- };
-
- /**
- * Creates a plain object from a Metadata message. Also converts values to other types if specified.
- * @function toObject
- * @memberof Metadata
- * @static
- * @param {Metadata} m Metadata
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- Metadata.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.defaults) {
- d.MimeType = "";
- }
- if (m.MimeType != null && m.hasOwnProperty("MimeType")) {
- d.MimeType = m.MimeType;
- }
- return d;
- };
-
- /**
- * Converts this Metadata to JSON.
- * @function toJSON
- * @memberof Metadata
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- Metadata.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
-
- return Metadata;
-})();
-
-module.exports = $root;
-
-
-/***/ }),
-
-/***/ 9437:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-// @ts-ignore
-const BufferList = __nccwpck_require__(8386)
-
-/**
- * @type {import('../types').Chunker}
- */
-module.exports = async function * fixedSizeChunker (source, options) {
- let bl = new BufferList()
- let currentLength = 0
- let emitted = false
- const maxChunkSize = options.maxChunkSize
+var BufferList__default = /*#__PURE__*/_interopDefaultLegacy(BufferList);
+async function* fixedSizeChunker(source, options) {
+ let bl = new BufferList__default['default']();
+ let currentLength = 0;
+ let emitted = false;
+ const maxChunkSize = options.maxChunkSize;
for await (const buffer of source) {
- bl.append(buffer)
-
- currentLength += buffer.length
-
+ bl.append(buffer);
+ currentLength += buffer.length;
while (currentLength >= maxChunkSize) {
- yield bl.slice(0, maxChunkSize)
- emitted = true
-
- // throw away consumed bytes
+ yield bl.slice(0, maxChunkSize);
+ emitted = true;
if (maxChunkSize === bl.length) {
- bl = new BufferList()
- currentLength = 0
+ bl = new BufferList__default['default']();
+ currentLength = 0;
} else {
- const newBl = new BufferList()
- newBl.append(bl.shallowSlice(maxChunkSize))
- bl = newBl
-
- // update our offset
- currentLength -= maxChunkSize
+ const newBl = new BufferList__default['default']();
+ newBl.append(bl.shallowSlice(maxChunkSize));
+ bl = newBl;
+ currentLength -= maxChunkSize;
}
}
}
-
if (!emitted || currentLength) {
- // return any remaining bytes or an empty buffer
- yield bl.slice(0, currentLength)
+ yield bl.slice(0, currentLength);
}
}
+module.exports = fixedSizeChunker;
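// --- Editor's sketch: illustrative only, not part of the bundle; the demo
// function is ours and is never invoked here. Five bytes chunked with
// maxChunkSize 2 come out as chunks of 2, 2 and 1 bytes:
async function demoFixedSizeChunker () {
  const sizes = []
  for await (const chunk of fixedSizeChunker([Uint8Array.from([1, 2, 3, 4, 5])], { maxChunkSize: 2 })) {
    sizes.push(chunk.length)
  }
  return sizes // [2, 2, 1]
}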
+
/***/ }),
-/***/ 7434:
+/***/ 7290:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-// @ts-ignore
-const BufferList = __nccwpck_require__(8386)
-// @ts-ignore
-const { create } = __nccwpck_require__(1715)
-const errcode = __nccwpck_require__(2997)
+var BufferList = __nccwpck_require__(8386);
+var rabinWasm = __nccwpck_require__(1715);
+var errCode = __nccwpck_require__(2997);
-/**
- * @typedef {object} RabinOptions
- * @property {number} min
- * @property {number} max
- * @property {number} bits
- * @property {number} window
- * @property {number} polynomial
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
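// Editor's note: _interopDefaultLegacy is bundler-generated CJS/ESM glue -
// a module that already has ESM shape ('default' in e) passes through,
// anything else is wrapped as { 'default': e }.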
-/**
- * @type {import('../types').Chunker}
- */
-module.exports = async function * rabinChunker (source, options) {
- let min, max, avg
+var BufferList__default = /*#__PURE__*/_interopDefaultLegacy(BufferList);
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+async function* rabinChunker(source, options) {
+ let min, max, avg;
if (options.minChunkSize && options.maxChunkSize && options.avgChunkSize) {
- avg = options.avgChunkSize
- min = options.minChunkSize
- max = options.maxChunkSize
+ avg = options.avgChunkSize;
+ min = options.minChunkSize;
+ max = options.maxChunkSize;
} else if (!options.avgChunkSize) {
- throw errcode(new Error('please specify an average chunk size'), 'ERR_INVALID_AVG_CHUNK_SIZE')
+ throw errCode__default['default'](new Error('please specify an average chunk size'), 'ERR_INVALID_AVG_CHUNK_SIZE');
} else {
- avg = options.avgChunkSize
- min = avg / 3
- max = avg + (avg / 2)
+ avg = options.avgChunkSize;
+ min = avg / 3;
+ max = avg + avg / 2;
}
-
- // validate min/max/avg in the same way as go
if (min < 16) {
- throw errcode(new Error('rabin min must be greater than 16'), 'ERR_INVALID_MIN_CHUNK_SIZE')
+    throw errCode__default['default'](new Error('rabin min must be at least 16'), 'ERR_INVALID_MIN_CHUNK_SIZE');
}
-
if (max < min) {
- max = min
+ max = min;
}
-
if (avg < min) {
- avg = min
+ avg = min;
}
-
- const sizepow = Math.floor(Math.log2(avg))
-
+ const sizepow = Math.floor(Math.log2(avg));
for await (const chunk of rabin(source, {
- min: min,
- max: max,
- bits: sizepow,
- window: options.window,
- polynomial: options.polynomial
- })) {
- yield chunk
- }
-}
-
-/**
- * @param {AsyncIterable<Uint8Array>} source
- * @param {RabinOptions} options
- */
-async function * rabin (source, options) {
- const r = await create(options.bits, options.min, options.max, options.window)
- const buffers = new BufferList()
-
+ min: min,
+ max: max,
+ bits: sizepow,
+ window: options.window,
+ polynomial: options.polynomial
+ })) {
+ yield chunk;
+ }
+}
+async function* rabin(source, options) {
+ const r = await rabinWasm.create(options.bits, options.min, options.max, options.window);
+ const buffers = new BufferList__default['default']();
for await (const chunk of source) {
- buffers.append(chunk)
-
- const sizes = r.fingerprint(chunk)
-
+ buffers.append(chunk);
+ const sizes = r.fingerprint(chunk);
for (let i = 0; i < sizes.length; i++) {
- const size = sizes[i]
- const buf = buffers.slice(0, size)
- buffers.consume(size)
-
- yield buf
+ const size = sizes[i];
+ const buf = buffers.slice(0, size);
+ buffers.consume(size);
+ yield buf;
}
}
-
if (buffers.length) {
- yield buffers.slice(0)
+ yield buffers.slice(0);
}
}
+module.exports = rabinChunker;
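// Editor's note: when only avgChunkSize is given, the fallbacks above work
// out to min = avg / 3, max = 1.5 * avg and bits = floor(log2(avg)); the
// min/max/avg validation mirrors go-ipfs (per the original source comment).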
+
/***/ }),
-/***/ 5849:
+/***/ 7830:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const { UnixFS } = __nccwpck_require__(4171)
-const persist = __nccwpck_require__(8567)
-const { encode, prepare } = __nccwpck_require__(8012)
-
-/**
- * @typedef {import('../types').Directory} Directory
- */
+var ipfsUnixfs = __nccwpck_require__(4103);
+var persist = __nccwpck_require__(8095);
+var dagPb = __nccwpck_require__(8012);
-/**
- * @type {import('../types').UnixFSV1DagBuilder}
- */
const dirBuilder = async (item, blockstore, options) => {
- const unixfs = new UnixFS({
+ const unixfs = new ipfsUnixfs.UnixFS({
type: 'directory',
mtime: item.mtime,
mode: item.mode
- })
-
- const buffer = encode(prepare({ Data: unixfs.marshal() }))
- const cid = await persist(buffer, blockstore, options)
- const path = item.path
-
+ });
+ const buffer = dagPb.encode(dagPb.prepare({ Data: unixfs.marshal() }));
+ const cid = await persist(buffer, blockstore, options);
+ const path = item.path;
return {
cid,
path,
unixfs,
size: buffer.length
- }
-}
+ };
+};
-module.exports = dirBuilder
+module.exports = dirBuilder;
/***/ }),
-/***/ 6024:
+/***/ 9601:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const batch = __nccwpck_require__(3454)
-
-/**
- * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder
- */
+var batch = __nccwpck_require__(3454);
-/**
- * @type {FileDAGBuilder}
- */
-function balanced (source, reduce, options) {
- return reduceToParents(source, reduce, options)
-}
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-/**
- * @type {FileDAGBuilder}
- */
-async function reduceToParents (source, reduce, options) {
- const roots = []
+var batch__default = /*#__PURE__*/_interopDefaultLegacy(batch);
- for await (const chunked of batch(source, options.maxChildrenPerNode)) {
- roots.push(await reduce(chunked))
+function balanced(source, reduce, options) {
+ return reduceToParents(source, reduce, options);
+}
+async function reduceToParents(source, reduce, options) {
+ const roots = [];
+ for await (const chunked of batch__default['default'](source, options.maxChildrenPerNode)) {
+ roots.push(await reduce(chunked));
}
-
if (roots.length > 1) {
- return reduceToParents(roots, reduce, options)
+ return reduceToParents(roots, reduce, options);
}
-
- return roots[0]
+ return roots[0];
}
-module.exports = balanced
+module.exports = balanced;
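// Editor's note: with maxChildrenPerNode = 2, seven leaves reduce as
// [1,2][3,4][5,6][7] -> 4 parents -> 2 -> 1 root, so the recursion in
// reduceToParents terminates after O(log n) passes.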
/***/ }),
-/***/ 3794:
+/***/ 532:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const { UnixFS } = __nccwpck_require__(4171)
-const persist = __nccwpck_require__(8567)
-const dagPb = __nccwpck_require__(8012)
-const raw = __nccwpck_require__(2048)
+var ipfsUnixfs = __nccwpck_require__(4103);
+var persist = __nccwpck_require__(8095);
+var dagPb = __nccwpck_require__(8012);
+var rawCodec = __nccwpck_require__(2048);
-/**
- * @typedef {import('../../types').BufferImporter} BufferImporter
- */
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () {
+ return e[k];
+ }
+ });
+ }
+ });
+ }
+ n['default'] = e;
+ return Object.freeze(n);
+}
-/**
- * @type {BufferImporter}
- */
-async function * bufferImporter (file, block, options) {
+var dagPb__namespace = /*#__PURE__*/_interopNamespace(dagPb);
+var rawCodec__namespace = /*#__PURE__*/_interopNamespace(rawCodec);
+
+async function* bufferImporter(file, block, options) {
for await (let buffer of file.content) {
yield async () => {
- options.progress(buffer.length, file.path)
- let unixfs
-
- /** @type {import('../../types').PersistOptions} */
+ options.progress(buffer.length, file.path);
+ let unixfs;
const opts = {
- codec: dagPb,
+ codec: dagPb__namespace,
cidVersion: options.cidVersion,
hasher: options.hasher,
onlyHash: options.onlyHash
- }
-
+ };
if (options.rawLeaves) {
- opts.codec = raw
- opts.cidVersion = 1
+ opts.codec = rawCodec__namespace;
+ opts.cidVersion = 1;
} else {
- unixfs = new UnixFS({
+ unixfs = new ipfsUnixfs.UnixFS({
type: options.leafType,
data: buffer,
mtime: file.mtime,
mode: file.mode
- })
-
- buffer = dagPb.encode({
+ });
+ buffer = dagPb__namespace.encode({
Data: unixfs.marshal(),
Links: []
- })
+ });
}
-
return {
cid: await persist(buffer, block, opts),
unixfs,
size: buffer.length
- }
- }
+ };
+ };
}
}
-module.exports = bufferImporter
+module.exports = bufferImporter;
/***/ }),
-/***/ 5915:
+/***/ 1016:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const all = __nccwpck_require__(5810)
+var all = __nccwpck_require__(5810);
-/**
- * @type {import('../../types').FileDAGBuilder}
- */
-module.exports = async function (source, reduce) {
- return reduce(await all(source))
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+
+var all__default = /*#__PURE__*/_interopDefaultLegacy(all);
+
+async function flat(source, reduce) {
+ return reduce(await all__default['default'](source));
}
+module.exports = flat;
+
/***/ }),
-/***/ 1347:
+/***/ 6234:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const errCode = __nccwpck_require__(2997)
-const { UnixFS } = __nccwpck_require__(4171)
-const persist = __nccwpck_require__(8567)
-const { encode, prepare } = __nccwpck_require__(8012)
-const parallelBatch = __nccwpck_require__(6615)
-const rawCodec = __nccwpck_require__(2048)
-const dagPb = __nccwpck_require__(8012)
+var errCode = __nccwpck_require__(2997);
+var ipfsUnixfs = __nccwpck_require__(4103);
+var persist = __nccwpck_require__(8095);
+var dagPb = __nccwpck_require__(8012);
+var parallelBatch = __nccwpck_require__(6615);
+var rawCodec = __nccwpck_require__(2048);
+var flat = __nccwpck_require__(1016);
+var balanced = __nccwpck_require__(9601);
+var trickle = __nccwpck_require__(4889);
+var bufferImporter = __nccwpck_require__(532);
-/**
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('../../types').File} File
- * @typedef {import('../../types').ImporterOptions} ImporterOptions
- * @typedef {import('../../types').Reducer} Reducer
- * @typedef {import('../../types').DAGBuilder} DAGBuilder
- * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-/**
- * @type {{ [key: string]: FileDAGBuilder}}
- */
-const dagBuilders = {
- flat: __nccwpck_require__(5915),
- balanced: __nccwpck_require__(6024),
- trickle: __nccwpck_require__(8468)
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () {
+ return e[k];
+ }
+ });
+ }
+ });
+ }
+ n['default'] = e;
+ return Object.freeze(n);
}
-/**
- * @param {File} file
- * @param {Blockstore} blockstore
- * @param {ImporterOptions} options
- */
-async function * buildFileBatch (file, blockstore, options) {
- let count = -1
- let previous
- let bufferImporter
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+var dagPb__namespace = /*#__PURE__*/_interopNamespace(dagPb);
+var parallelBatch__default = /*#__PURE__*/_interopDefaultLegacy(parallelBatch);
+var rawCodec__namespace = /*#__PURE__*/_interopNamespace(rawCodec);
+const dagBuilders = {
+ flat: flat,
+ balanced: balanced,
+ trickle: trickle
+};
+async function* buildFileBatch(file, blockstore, options) {
+ let count = -1;
+ let previous;
+ let bufferImporter$1;
if (typeof options.bufferImporter === 'function') {
- bufferImporter = options.bufferImporter
+ bufferImporter$1 = options.bufferImporter;
} else {
- bufferImporter = __nccwpck_require__(3794)
+ bufferImporter$1 = bufferImporter;
}
-
- for await (const entry of parallelBatch(bufferImporter(file, blockstore, options), options.blockWriteConcurrency)) {
- count++
-
+ for await (const entry of parallelBatch__default['default'](bufferImporter$1(file, blockstore, options), options.blockWriteConcurrency)) {
+ count++;
if (count === 0) {
- previous = entry
- continue
+ previous = entry;
+ continue;
} else if (count === 1 && previous) {
- yield previous
- previous = null
+ yield previous;
+ previous = null;
}
-
- yield entry
+ yield entry;
}
-
if (previous) {
- previous.single = true
- yield previous
+ previous.single = true;
+ yield previous;
}
}
-
-/**
- * @param {File} file
- * @param {Blockstore} blockstore
- * @param {ImporterOptions} options
- */
const reduce = (file, blockstore, options) => {
- /**
- * @type {Reducer}
- */
- async function reducer (leaves) {
+ async function reducer(leaves) {
if (leaves.length === 1 && leaves[0].single && options.reduceSingleLeafToSelf) {
- const leaf = leaves[0]
-
- if (leaf.cid.code === rawCodec.code && (file.mtime !== undefined || file.mode !== undefined)) {
- // only one leaf node which is a buffer - we have metadata so convert it into a
- // UnixFS entry otherwise we'll have nowhere to store the metadata
- let buffer = await blockstore.get(leaf.cid)
-
- leaf.unixfs = new UnixFS({
+ const leaf = leaves[0];
+ if (leaf.cid.code === rawCodec__namespace.code && (file.mtime !== undefined || file.mode !== undefined)) {
+ let buffer = await blockstore.get(leaf.cid);
+ leaf.unixfs = new ipfsUnixfs.UnixFS({
type: 'file',
mtime: file.mtime,
mode: file.mode,
data: buffer
- })
-
- buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))
-
- // // TODO vmx 2021-03-26: This is what the original code does, it checks
- // // the multihash of the original leaf node and uses then the same
- // // hasher. i wonder if that's really needed or if we could just use
- // // the hasher from `options.hasher` instead.
- // const multihash = mh.decode(leaf.cid.multihash.bytes)
- // let hasher
- // switch multihash {
- // case sha256.code {
- // hasher = sha256
- // break;
- // }
- // //case identity.code {
- // // hasher = identity
- // // break;
- // //}
- // default: {
- // throw new Error(`Unsupported hasher "${multihash}"`)
- // }
- // }
+ });
+ buffer = dagPb.encode(dagPb.prepare({ Data: leaf.unixfs.marshal() }));
leaf.cid = await persist(buffer, blockstore, {
...options,
- codec: dagPb,
+ codec: dagPb__namespace,
hasher: options.hasher,
cidVersion: options.cidVersion
- })
- leaf.size = buffer.length
+ });
+ leaf.size = buffer.length;
}
-
return {
cid: leaf.cid,
path: file.path,
unixfs: leaf.unixfs,
size: leaf.size
- }
+ };
}
-
- // create a parent node and add all the leaves
- const f = new UnixFS({
+ const f = new ipfsUnixfs.UnixFS({
type: 'file',
mtime: file.mtime,
mode: file.mode
- })
-
- const links = leaves
- .filter(leaf => {
- if (leaf.cid.code === rawCodec.code && leaf.size) {
- return true
- }
-
- if (leaf.unixfs && !leaf.unixfs.data && leaf.unixfs.fileSize()) {
- return true
- }
-
- return Boolean(leaf.unixfs && leaf.unixfs.data && leaf.unixfs.data.length)
- })
- .map((leaf) => {
- if (leaf.cid.code === rawCodec.code) {
- // node is a leaf buffer
- f.addBlockSize(leaf.size)
-
- return {
- Name: '',
- Tsize: leaf.size,
- Hash: leaf.cid
- }
- }
-
- if (!leaf.unixfs || !leaf.unixfs.data) {
- // node is an intermediate node
- f.addBlockSize((leaf.unixfs && leaf.unixfs.fileSize()) || 0)
- } else {
- // node is a unixfs 'file' leaf node
- f.addBlockSize(leaf.unixfs.data.length)
- }
-
+ });
+ const links = leaves.filter(leaf => {
+ if (leaf.cid.code === rawCodec__namespace.code && leaf.size) {
+ return true;
+ }
+ if (leaf.unixfs && !leaf.unixfs.data && leaf.unixfs.fileSize()) {
+ return true;
+ }
+ return Boolean(leaf.unixfs && leaf.unixfs.data && leaf.unixfs.data.length);
+ }).map(leaf => {
+ if (leaf.cid.code === rawCodec__namespace.code) {
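+ // node is a leaf buffer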
+ f.addBlockSize(leaf.size);
return {
Name: '',
Tsize: leaf.size,
Hash: leaf.cid
- }
- })
-
+ };
+ }
+ if (!leaf.unixfs || !leaf.unixfs.data) {
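+ // node is an intermediate node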
+ f.addBlockSize(leaf.unixfs && leaf.unixfs.fileSize() || 0);
+ } else {
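+ // node is a unixfs 'file' leaf node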
+ f.addBlockSize(leaf.unixfs.data.length);
+ }
+ return {
+ Name: '',
+ Tsize: leaf.size,
+ Hash: leaf.cid
+ };
+ });
const node = {
Data: f.marshal(),
Links: links
- }
- const buffer = encode(prepare(node))
- const cid = await persist(buffer, blockstore, options)
-
+ };
+ const buffer = dagPb.encode(dagPb.prepare(node));
+ const cid = await persist(buffer, blockstore, options);
return {
cid,
path: file.path,
unixfs: f,
size: buffer.length + node.Links.reduce((acc, curr) => acc + curr.Tsize, 0)
- }
+ };
}
-
- return reducer
-}
-
-/**
- * @type {import('../../types').UnixFSV1DagBuilder}
- */
-function fileBuilder (file, block, options) {
- const dagBuilder = dagBuilders[options.strategy]
-
+ return reducer;
+};
+function fileBuilder(file, block, options) {
+ const dagBuilder = dagBuilders[options.strategy];
if (!dagBuilder) {
- throw errCode(new Error(`Unknown importer build strategy name: ${options.strategy}`), 'ERR_BAD_STRATEGY')
+ throw errCode__default['default'](new Error(`Unknown importer build strategy name: ${ options.strategy }`), 'ERR_BAD_STRATEGY');
}
-
- return dagBuilder(buildFileBatch(file, block, options), reduce(file, block, options), options)
+ return dagBuilder(buildFileBatch(file, block, options), reduce(file, block, options), options);
}
-module.exports = fileBuilder
+module.exports = fileBuilder;
/***/ }),
-/***/ 8468:
+/***/ 4889:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const batch = __nccwpck_require__(3454)
-
-/**
- * @typedef {import('ipfs-unixfs').UnixFS} UnixFS
- * @typedef {import('../../types').ImporterOptions} ImporterOptions
- * @typedef {import('../../types').InProgressImportResult} InProgressImportResult
- * @typedef {import('../../types').TrickleDagNode} TrickleDagNode
- * @typedef {import('../../types').Reducer} Reducer
- * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder
- */
+var batch = __nccwpck_require__(3454);
-/**
- * @type {FileDAGBuilder}
- */
-module.exports = async function trickleStream (source, reduce, options) {
- const root = new Root(options.layerRepeat)
- let iteration = 0
- let maxDepth = 1
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- /** @type {SubTree} */
- let subTree = root
+var batch__default = /*#__PURE__*/_interopDefaultLegacy(batch);
- for await (const layer of batch(source, options.maxChildrenPerNode)) {
+async function trickleStream(source, reduce, options) {
+ const root = new Root(options.layerRepeat);
+ let iteration = 0;
+ let maxDepth = 1;
+ let subTree = root;
+ for await (const layer of batch__default['default'](source, options.maxChildrenPerNode)) {
if (subTree.isFull()) {
if (subTree !== root) {
- root.addChild(await subTree.reduce(reduce))
+ root.addChild(await subTree.reduce(reduce));
}
-
if (iteration && iteration % options.layerRepeat === 0) {
- maxDepth++
+ maxDepth++;
}
-
- subTree = new SubTree(maxDepth, options.layerRepeat, iteration)
-
- iteration++
+ subTree = new SubTree(maxDepth, options.layerRepeat, iteration);
+ iteration++;
}
-
- subTree.append(layer)
+ subTree.append(layer);
}
-
if (subTree && subTree !== root) {
- root.addChild(await subTree.reduce(reduce))
+ root.addChild(await subTree.reduce(reduce));
}
-
- return root.reduce(reduce)
+ return root.reduce(reduce);
}
-
class SubTree {
- /**
- * @param {number} maxDepth
- * @param {number} layerRepeat
- * @param {number} [iteration=0]
- */
- constructor (maxDepth, layerRepeat, iteration = 0) {
- this.maxDepth = maxDepth
- this.layerRepeat = layerRepeat
- this.currentDepth = 1
- this.iteration = iteration
-
- /** @type {TrickleDagNode} */
+ constructor(maxDepth, layerRepeat, iteration = 0) {
+ this.maxDepth = maxDepth;
+ this.layerRepeat = layerRepeat;
+ this.currentDepth = 1;
+ this.iteration = iteration;
this.root = this.node = this.parent = {
children: [],
depth: this.currentDepth,
maxDepth,
maxChildren: (this.maxDepth - this.currentDepth) * this.layerRepeat
- }
+ };
}
-
- isFull () {
+ isFull() {
if (!this.root.data) {
- return false
+ return false;
}
-
if (this.currentDepth < this.maxDepth && this.node.maxChildren) {
- // can descend
- this._addNextNodeToParent(this.node)
-
- return false
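+ // can descend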
+ this._addNextNodeToParent(this.node);
+ return false;
}
-
- // try to find new node from node.parent
- const distantRelative = this._findParent(this.node, this.currentDepth)
-
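+ // try to find new node from node.parent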
+ const distantRelative = this._findParent(this.node, this.currentDepth);
if (distantRelative) {
- this._addNextNodeToParent(distantRelative)
-
- return false
+ this._addNextNodeToParent(distantRelative);
+ return false;
}
-
- return true
+ return true;
}
-
- /**
- * @param {TrickleDagNode} parent
- */
- _addNextNodeToParent (parent) {
- this.parent = parent
-
- // find site for new node
+ _addNextNodeToParent(parent) {
+ this.parent = parent;
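+ // find site for new node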
const nextNode = {
children: [],
depth: parent.depth + 1,
parent,
maxDepth: this.maxDepth,
maxChildren: Math.floor(parent.children.length / this.layerRepeat) * this.layerRepeat
- }
-
- // @ts-ignore
- parent.children.push(nextNode)
-
- this.currentDepth = nextNode.depth
- this.node = nextNode
+ };
+ parent.children.push(nextNode);
+ this.currentDepth = nextNode.depth;
+ this.node = nextNode;
}
-
- /**
- *
- * @param {InProgressImportResult[]} layer
- */
- append (layer) {
- this.node.data = layer
+ append(layer) {
+ this.node.data = layer;
}
-
- /**
- * @param {Reducer} reduce
- */
- reduce (reduce) {
- return this._reduce(this.root, reduce)
+ reduce(reduce) {
+ return this._reduce(this.root, reduce);
}
-
- /**
- * @param {TrickleDagNode} node
- * @param {Reducer} reduce
- * @returns {Promise}
- */
- async _reduce (node, reduce) {
- /** @type {InProgressImportResult[]} */
- let children = []
-
+ async _reduce(node, reduce) {
+ let children = [];
if (node.children.length) {
- children = await Promise.all(
- node.children
- // @ts-ignore
- .filter(child => child.data)
- // @ts-ignore
- .map(child => this._reduce(child, reduce))
- )
+ children = await Promise.all(node.children.filter(child => child.data).map(child => this._reduce(child, reduce)));
}
-
- return reduce((node.data || []).concat(children))
+ return reduce((node.data || []).concat(children));
}
-
- /**
- * @param {TrickleDagNode} node
- * @param {number} depth
- * @returns {TrickleDagNode | undefined}
- */
- _findParent (node, depth) {
- const parent = node.parent
-
+ _findParent(node, depth) {
+ const parent = node.parent;
if (!parent || parent.depth === 0) {
- return
+ return;
}
-
if (parent.children.length === parent.maxChildren || !parent.maxChildren) {
- // this layer is full, may be able to traverse to a different branch
- return this._findParent(parent, depth)
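+ // this layer is full, may be able to traverse to a different branch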
+ return this._findParent(parent, depth);
}
-
- return parent
+ return parent;
}
}
-
class Root extends SubTree {
- /**
- * @param {number} layerRepeat
- */
- constructor (layerRepeat) {
- super(0, layerRepeat)
-
- this.root.depth = 0
- this.currentDepth = 1
+ constructor(layerRepeat) {
+ super(0, layerRepeat);
+ this.root.depth = 0;
+ this.currentDepth = 1;
}
-
- /**
- * @param {InProgressImportResult} child
- */
- addChild (child) {
- this.root.children.push(child)
+ addChild(child) {
+ this.root.children.push(child);
}
-
- /**
- * @param {Reducer} reduce
- */
- reduce (reduce) {
- return reduce((this.root.data || []).concat(this.root.children))
+ reduce(reduce) {
+ return reduce((this.root.data || []).concat(this.root.children));
}
}
+module.exports = trickleStream;
+
/***/ }),
-/***/ 5475:
+/***/ 4390:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const dirBuilder = __nccwpck_require__(5849)
-const fileBuilder = __nccwpck_require__(1347)
-const errCode = __nccwpck_require__(2997)
+var dir = __nccwpck_require__(7830);
+var index = __nccwpck_require__(6234);
+var errCode = __nccwpck_require__(2997);
+var rabin = __nccwpck_require__(7290);
+var fixedSize = __nccwpck_require__(8452);
+var validateChunks = __nccwpck_require__(6628);
-/**
- * @typedef {import('../types').File} File
- * @typedef {import('../types').Directory} Directory
- * @typedef {import('../types').DAGBuilder} DAGBuilder
- * @typedef {import('../types').Chunker} Chunker
- * @typedef {import('../types').ChunkValidator} ChunkValidator
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-/**
- * @param {any} thing
- * @returns {thing is Iterable}
- */
-function isIterable (thing) {
- return Symbol.iterator in thing
-}
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
-/**
- * @param {any} thing
- * @returns {thing is AsyncIterable}
- */
-function isAsyncIterable (thing) {
- return Symbol.asyncIterator in thing
+function isIterable(thing) {
+ return Symbol.iterator in thing;
}
-
-/**
- * @param {Uint8Array | AsyncIterable | Iterable} content
- * @returns {AsyncIterable}
- */
-function contentAsAsyncIterable (content) {
+function isAsyncIterable(thing) {
+ return Symbol.asyncIterator in thing;
+}
+function contentAsAsyncIterable(content) {
try {
if (content instanceof Uint8Array) {
- return (async function * () {
- yield content
- }())
+ return async function* () {
+ yield content;
+ }();
} else if (isIterable(content)) {
- return (async function * () {
- yield * content
- }())
+ return async function* () {
+ yield* content;
+ }();
} else if (isAsyncIterable(content)) {
- return content
+ return content;
}
} catch {
- throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')
+ throw errCode__default['default'](new Error('Content was invalid'), 'ERR_INVALID_CONTENT');
}
-
- throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')
+ throw errCode__default['default'](new Error('Content was invalid'), 'ERR_INVALID_CONTENT');
}
-
-/**
- * @type {DAGBuilder}
- */
-async function * dagBuilder (source, blockstore, options) {
+async function* dagBuilder(source, blockstore, options) {
for await (const entry of source) {
if (entry.path) {
if (entry.path.substring(0, 2) === './') {
- options.wrapWithDirectory = true
+ options.wrapWithDirectory = true;
}
-
- entry.path = entry.path
- .split('/')
- .filter(path => path && path !== '.')
- .join('/')
+ entry.path = entry.path.split('/').filter(path => path && path !== '.').join('/');
}
-
if (entry.content) {
- /**
- * @type {Chunker}
- */
- let chunker
-
+ let chunker;
if (typeof options.chunker === 'function') {
- chunker = options.chunker
+ chunker = options.chunker;
} else if (options.chunker === 'rabin') {
- chunker = __nccwpck_require__(7434)
+ chunker = rabin;
} else {
- chunker = __nccwpck_require__(9437)
+ chunker = fixedSize;
}
-
- /**
- * @type {ChunkValidator}
- */
- let chunkValidator
-
+ let chunkValidator;
if (typeof options.chunkValidator === 'function') {
- chunkValidator = options.chunkValidator
+ chunkValidator = options.chunkValidator;
} else {
- chunkValidator = __nccwpck_require__(9185)
+ chunkValidator = validateChunks;
}
-
- /** @type {File} */
const file = {
path: entry.path,
mtime: entry.mtime,
mode: entry.mode,
content: chunker(chunkValidator(contentAsAsyncIterable(entry.content), options), options)
- }
-
- yield () => fileBuilder(file, blockstore, options)
+ };
+ yield () => index(file, blockstore, options);
} else if (entry.path) {
- /** @type {Directory} */
- const dir = {
+ const dir$1 = {
path: entry.path,
mtime: entry.mtime,
mode: entry.mode
- }
-
- yield () => dirBuilder(dir, blockstore, options)
+ };
+ yield () => dir(dir$1, blockstore, options);
} else {
- throw new Error('Import candidate must have content or path or both')
+ throw new Error('Import candidate must have content or path or both');
}
}
}
-module.exports = dagBuilder
+module.exports = dagBuilder;
/***/ }),
-/***/ 9185:
+/***/ 6628:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const errCode = __nccwpck_require__(2997)
-const uint8ArrayFromString = __nccwpck_require__(828)
+var errCode = __nccwpck_require__(2997);
+var fromString = __nccwpck_require__(3538);
-/**
- * @typedef {import('../types').ChunkValidator} ChunkValidator
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-/**
- * @type {ChunkValidator}
- */
-async function * validateChunks (source) {
+var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+
+async function* validateChunks(source) {
for await (const content of source) {
if (content.length === undefined) {
- throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')
+ throw errCode__default['default'](new Error('Content was invalid'), 'ERR_INVALID_CONTENT');
}
-
if (typeof content === 'string' || content instanceof String) {
- yield uint8ArrayFromString(content.toString())
+ yield fromString.fromString(content.toString());
} else if (Array.isArray(content)) {
- yield Uint8Array.from(content)
+ yield Uint8Array.from(content);
} else if (content instanceof Uint8Array) {
- yield content
+ yield content;
} else {
- throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')
+ throw errCode__default['default'](new Error('Content was invalid'), 'ERR_INVALID_CONTENT');
}
}
}
-module.exports = validateChunks
+module.exports = validateChunks;
/***/ }),
-/***/ 1607:
+/***/ 5849:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const { encode, prepare } = __nccwpck_require__(8012)
-const { UnixFS } = __nccwpck_require__(4171)
-const Dir = __nccwpck_require__(8245)
-const persist = __nccwpck_require__(8567)
-
-/**
- * @typedef {import('./types').ImporterOptions} ImporterOptions
- * @typedef {import('./types').ImportResult} ImportResult
- * @typedef {import('./types').InProgressImportResult} InProgressImportResult
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('./dir').DirProps} DirProps
- * @typedef {import('@ipld/dag-pb').PBNode} PBNode
- * @typedef {import('@ipld/dag-pb').PBLink} PBLink
- */
-
-class DirFlat extends Dir {
- /**
- * @param {DirProps} props
- * @param {ImporterOptions} options
- */
- constructor (props, options) {
- super(props, options)
+var dagPb = __nccwpck_require__(8012);
+var ipfsUnixfs = __nccwpck_require__(4103);
+var dir = __nccwpck_require__(4173);
+var persist = __nccwpck_require__(8095);
- /** @type {{ [key: string]: InProgressImportResult | Dir }} */
- this._children = {}
+class DirFlat extends dir {
+ constructor(props, options) {
+ super(props, options);
+ this._children = {};
}
-
- /**
- * @param {string} name
- * @param {InProgressImportResult | Dir} value
- */
- async put (name, value) {
- this.cid = undefined
- this.size = undefined
-
- this._children[name] = value
+ async put(name, value) {
+ this.cid = undefined;
+ this.size = undefined;
+ this._children[name] = value;
}
-
- /**
- * @param {string} name
- */
- get (name) {
- return Promise.resolve(this._children[name])
+ get(name) {
+ return Promise.resolve(this._children[name]);
}
-
- childCount () {
- return Object.keys(this._children).length
+ childCount() {
+ return Object.keys(this._children).length;
}
-
- directChildrenCount () {
- return this.childCount()
+ directChildrenCount() {
+ return this.childCount();
}
-
- onlyChild () {
- return this._children[Object.keys(this._children)[0]]
+ onlyChild() {
+ return this._children[Object.keys(this._children)[0]];
}
-
- async * eachChildSeries () {
- const keys = Object.keys(this._children)
-
+ async *eachChildSeries() {
+ const keys = Object.keys(this._children);
for (let i = 0; i < keys.length; i++) {
- const key = keys[i]
-
+ const key = keys[i];
yield {
key: key,
child: this._children[key]
- }
+ };
}
}
-
- /**
- * @param {Blockstore} block
- * @returns {AsyncIterable}
- */
- async * flush (block) {
- const children = Object.keys(this._children)
- const links = []
-
+ async *flush(block) {
+ const children = Object.keys(this._children);
+ const links = [];
for (let i = 0; i < children.length; i++) {
- let child = this._children[children[i]]
-
- if (child instanceof Dir) {
+ let child = this._children[children[i]];
+ if (child instanceof dir) {
for await (const entry of child.flush(block)) {
- child = entry
-
- yield child
+ child = entry;
+ yield child;
}
}
-
if (child.size != null && child.cid) {
links.push({
Name: children[i],
Tsize: child.size,
Hash: child.cid
- })
+ });
+ }
+ }
+ const unixfs = new ipfsUnixfs.UnixFS({
+ type: 'directory',
+ mtime: this.mtime,
+ mode: this.mode
+ });
+ const node = {
+ Data: unixfs.marshal(),
+ Links: links
+ };
+ const buffer = dagPb.encode(dagPb.prepare(node));
+ const cid = await persist(buffer, block, this.options);
+ const size = buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize), 0);
+ this.cid = cid;
+ this.size = size;
+ yield {
+ cid,
+ unixfs,
+ path: this.path,
+ size
+ };
+ }
+}
+
+module.exports = DirFlat;
+
+
+/***/ }),
+
+/***/ 2922:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+var dagPb = __nccwpck_require__(8012);
+var ipfsUnixfs = __nccwpck_require__(4103);
+var dir = __nccwpck_require__(4173);
+var persist = __nccwpck_require__(8095);
+var hamtSharding = __nccwpck_require__(7820);
+
+class DirSharded extends dir {
+ constructor(props, options) {
+ super(props, options);
+ this._bucket = hamtSharding.createHAMT({
+ hashFn: options.hamtHashFn,
+ bits: options.hamtBucketBits
+ });
+ }
+ async put(name, value) {
+ await this._bucket.put(name, value);
+ }
+ get(name) {
+ return this._bucket.get(name);
+ }
+ childCount() {
+ return this._bucket.leafCount();
+ }
+ directChildrenCount() {
+ return this._bucket.childrenCount();
+ }
+ onlyChild() {
+ return this._bucket.onlyChild();
+ }
+ async *eachChildSeries() {
+ for await (const {key, value} of this._bucket.eachLeafSeries()) {
+ yield {
+ key,
+ child: value
+ };
+ }
+ }
+ async *flush(blockstore) {
+ for await (const entry of flush(this._bucket, blockstore, this, this.options)) {
+ yield {
+ ...entry,
+ path: this.path
+ };
+ }
+ }
+}
+async function* flush(bucket, blockstore, shardRoot, options) {
+ const children = bucket._children;
+ const links = [];
+ let childrenSize = 0;
+ for (let i = 0; i < children.length; i++) {
+ const child = children.get(i);
+ if (!child) {
+ continue;
+ }
+ const labelPrefix = i.toString(16).toUpperCase().padStart(2, '0');
+ if (child instanceof hamtSharding.Bucket) {
+ let shard;
+ for await (const subShard of await flush(child, blockstore, null, options)) {
+ shard = subShard;
+ }
+ if (!shard) {
+ throw new Error('Could not flush sharded directory, no subshard found');
+ }
+ links.push({
+ Name: labelPrefix,
+ Tsize: shard.size,
+ Hash: shard.cid
+ });
+ childrenSize += shard.size;
+ } else if (typeof child.value.flush === 'function') {
+ const dir = child.value;
+ let flushedDir;
+ for await (const entry of dir.flush(blockstore)) {
+ flushedDir = entry;
+ yield flushedDir;
+ }
+ const label = labelPrefix + child.key;
+ links.push({
+ Name: label,
+ Tsize: flushedDir.size,
+ Hash: flushedDir.cid
+ });
+ childrenSize += flushedDir.size;
+ } else {
+ const value = child.value;
+ if (!value.cid) {
+ continue;
+ }
+ const label = labelPrefix + child.key;
+ const size = value.size;
+ links.push({
+ Name: label,
+ Tsize: size,
+ Hash: value.cid
+ });
+ childrenSize += size;
+ }
+ }
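+ // go-ipfs uses little endian, that's why we have to
+ // reverse the bit field before storing it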
+ const data = Uint8Array.from(children.bitField().reverse());
+ const dir = new ipfsUnixfs.UnixFS({
+ type: 'hamt-sharded-directory',
+ data,
+ fanout: bucket.tableSize(),
+ hashType: options.hamtHashCode,
+ mtime: shardRoot && shardRoot.mtime,
+ mode: shardRoot && shardRoot.mode
+ });
+ const node = {
+ Data: dir.marshal(),
+ Links: links
+ };
+ const buffer = dagPb.encode(dagPb.prepare(node));
+ const cid = await persist(buffer, blockstore, options);
+ const size = buffer.length + childrenSize;
+ yield {
+ cid,
+ unixfs: dir,
+ size
+ };
+}
+
+module.exports = DirSharded;
+
+
+/***/ }),
+
+/***/ 4173:
+/***/ ((module) => {
+
+"use strict";
+
+
+class Dir {
+ constructor(props, options) {
+ this.options = options || {};
+ this.root = props.root;
+ this.dir = props.dir;
+ this.path = props.path;
+ this.dirty = props.dirty;
+ this.flat = props.flat;
+ this.parent = props.parent;
+ this.parentKey = props.parentKey;
+ this.unixfs = props.unixfs;
+ this.mode = props.mode;
+ this.mtime = props.mtime;
+ this.cid = undefined;
+ this.size = undefined;
+ }
+ async put(name, value) {
+ }
+ get(name) {
+ return Promise.resolve(this);
+ }
+ async *eachChildSeries() {
+ }
+ async *flush(blockstore) {
+ }
+}
+
+module.exports = Dir;
+
+
+/***/ }),
+
+/***/ 1622:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+var dirSharded = __nccwpck_require__(2922);
+var dirFlat = __nccwpck_require__(5849);
+
+async function flatToShard(child, dir, threshold, options) {
+ let newDir = dir;
+ if (dir instanceof dirFlat && dir.directChildrenCount() >= threshold) {
+ newDir = await convertToShard(dir, options);
+ }
+ const parent = newDir.parent;
+ if (parent) {
+ if (newDir !== dir) {
+ if (child) {
+ child.parent = newDir;
}
+ if (!newDir.parentKey) {
+ throw new Error('No parent key found');
+ }
+ await parent.put(newDir.parentKey, newDir);
}
+ return flatToShard(newDir, parent, threshold, options);
+ }
+ return newDir;
+}
+async function convertToShard(oldDir, options) {
+ const newDir = new dirSharded({
+ root: oldDir.root,
+ dir: true,
+ parent: oldDir.parent,
+ parentKey: oldDir.parentKey,
+ path: oldDir.path,
+ dirty: oldDir.dirty,
+ flat: false,
+ mtime: oldDir.mtime,
+ mode: oldDir.mode
+ }, options);
+ for await (const {key, child} of oldDir.eachChildSeries()) {
+ await newDir.put(key, child);
+ }
+ return newDir;
+}
+
+module.exports = flatToShard;
+
+
+/***/ }),
+
+/***/ 1626:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
- const unixfs = new UnixFS({
- type: 'directory',
- mtime: this.mtime,
- mode: this.mode
- })
+Object.defineProperty(exports, "__esModule", ({ value: true }));
- /** @type {PBNode} */
- const node = { Data: unixfs.marshal(), Links: links }
- const buffer = encode(prepare(node))
- const cid = await persist(buffer, block, this.options)
- const size = buffer.length + node.Links.reduce(
- /**
- * @param {number} acc
- * @param {PBLink} curr
- */
- (acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize),
- 0)
+var parallelBatch = __nccwpck_require__(6615);
+var options = __nccwpck_require__(9902);
+var index = __nccwpck_require__(4390);
+var treeBuilder = __nccwpck_require__(5101);
+
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- this.cid = cid
- this.size = size
+var parallelBatch__default = /*#__PURE__*/_interopDefaultLegacy(parallelBatch);
+async function* importer(source, blockstore, options$1 = {}) {
+ const opts = options(options$1);
+ let dagBuilder;
+ if (typeof options$1.dagBuilder === 'function') {
+ dagBuilder = options$1.dagBuilder;
+ } else {
+ dagBuilder = index;
+ }
+ let treeBuilder$1;
+ if (typeof options$1.treeBuilder === 'function') {
+ treeBuilder$1 = options$1.treeBuilder;
+ } else {
+ treeBuilder$1 = treeBuilder;
+ }
+ let candidates;
+ if (Symbol.asyncIterator in source || Symbol.iterator in source) {
+ candidates = source;
+ } else {
+ candidates = [source];
+ }
+ for await (const entry of treeBuilder$1(parallelBatch__default['default'](dagBuilder(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) {
yield {
- cid,
- unixfs,
- path: this.path,
- size
- }
+ cid: entry.cid,
+ path: entry.path,
+ unixfs: entry.unixfs,
+ size: entry.size
+ };
}
}
-module.exports = DirFlat
+exports.importer = importer;
/***/ }),
-/***/ 1742:
+/***/ 9902:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const { encode, prepare } = __nccwpck_require__(8012)
-const { UnixFS } = __nccwpck_require__(4171)
-const Dir = __nccwpck_require__(8245)
-const persist = __nccwpck_require__(8567)
-const { createHAMT, Bucket } = __nccwpck_require__(7820)
+var mergeOptions = __nccwpck_require__(2555);
+var sha2 = __nccwpck_require__(6987);
+var murmur3 = __nccwpck_require__(6063);
-/**
- * @typedef {import('./types').ImporterOptions} ImporterOptions
- * @typedef {import('./types').ImportResult} ImportResult
- * @typedef {import('./types').InProgressImportResult} InProgressImportResult
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-/**
- * @typedef {import('./dir').DirProps} DirProps
- */
+var mergeOptions__default = /*#__PURE__*/_interopDefaultLegacy(mergeOptions);
-class DirSharded extends Dir {
- /**
- * @param {DirProps} props
- * @param {ImporterOptions} options
- */
- constructor (props, options) {
- super(props, options)
+async function hamtHashFn(buf) {
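+ // Murmur3 outputs 128 bits but, accidentally, IPFS Go's implementation
+ // only uses the first 64, so we must do the same for parity; the buffer
+ // is reversed because that's how the Go impl does it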
+ return (await murmur3.murmur3128.encode(buf)).slice(0, 8).reverse();
+}
+const defaultOptions = {
+ chunker: 'fixed',
+ strategy: 'balanced',
+ rawLeaves: false,
+ onlyHash: false,
+ reduceSingleLeafToSelf: true,
+ hasher: sha2.sha256,
+ leafType: 'file',
+ cidVersion: 0,
+ progress: () => () => {
+ },
+ shardSplitThreshold: 1000,
+ fileImportConcurrency: 50,
+ blockWriteConcurrency: 10,
+ minChunkSize: 262144,
+ maxChunkSize: 262144,
+ avgChunkSize: 262144,
+ window: 16,
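+ // FIXME: this number is too big for JavaScript
+ // https://github.com/ipfs/go-ipfs-chunker/blob/d0125832512163708c0804a3cda060e21acddae4/rabin.go#L11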
+ polynomial: 17437180132763652,
+ maxChildrenPerNode: 174,
+ layerRepeat: 4,
+ wrapWithDirectory: false,
+ recursive: false,
+ hidden: false,
+ timeout: undefined,
+ hamtHashFn,
+ hamtHashCode: 34,
+ hamtBucketBits: 8
+};
+var defaultOptions$1 = (options = {}) => {
+ const defaults = mergeOptions__default['default'].bind({ ignoreUndefined: true });
+ return defaults(defaultOptions, options);
+};
- /** @type {Bucket} */
- this._bucket = createHAMT({
- hashFn: options.hamtHashFn,
- bits: options.hamtBucketBits
- })
- }
+module.exports = defaultOptions$1;
- /**
- * @param {string} name
- * @param {InProgressImportResult | Dir} value
- */
- async put (name, value) {
- await this._bucket.put(name, value)
- }
- /**
- * @param {string} name
- */
- get (name) {
- return this._bucket.get(name)
- }
+/***/ }),
- childCount () {
- return this._bucket.leafCount()
- }
+/***/ 5101:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- directChildrenCount () {
- return this._bucket.childrenCount()
- }
+"use strict";
- onlyChild () {
- return this._bucket.onlyChild()
- }
- async * eachChildSeries () {
- for await (const { key, value } of this._bucket.eachLeafSeries()) {
- yield {
- key,
- child: value
+var dirFlat = __nccwpck_require__(5849);
+var flatToShard = __nccwpck_require__(1622);
+var dir = __nccwpck_require__(4173);
+var toPathComponents = __nccwpck_require__(4473);
+
+async function addToTree(elem, tree, options) {
+ const pathElems = toPathComponents(elem.path || '');
+ const lastIndex = pathElems.length - 1;
+ let parent = tree;
+ let currentPath = '';
+ for (let i = 0; i < pathElems.length; i++) {
+ const pathElem = pathElems[i];
+ currentPath += `${ currentPath ? '/' : '' }${ pathElem }`;
+ const last = i === lastIndex;
+ parent.dirty = true;
+ parent.cid = undefined;
+ parent.size = undefined;
+ if (last) {
+ await parent.put(pathElem, elem);
+ tree = await flatToShard(null, parent, options.shardSplitThreshold, options);
+ } else {
+ let dir$1 = await parent.get(pathElem);
+ if (!dir$1 || !(dir$1 instanceof dir)) {
+ dir$1 = new dirFlat({
+ root: false,
+ dir: true,
+ parent: parent,
+ parentKey: pathElem,
+ path: currentPath,
+ dirty: true,
+ flat: true,
+ mtime: dir$1 && dir$1.unixfs && dir$1.unixfs.mtime,
+ mode: dir$1 && dir$1.unixfs && dir$1.unixfs.mode
+ }, options);
}
+ await parent.put(pathElem, dir$1);
+ parent = dir$1;
}
}
-
- /**
- * @param {Blockstore} blockstore
- * @returns {AsyncIterable}
- */
- async * flush (blockstore) {
- for await (const entry of flush(this._bucket, blockstore, this, this.options)) {
- yield {
- ...entry,
- path: this.path
- }
+ return tree;
+}
+async function* flushAndYield(tree, blockstore) {
+ if (!(tree instanceof dir)) {
+ if (tree && tree.unixfs && tree.unixfs.isDirectory()) {
+ yield tree;
}
+ return;
}
+ yield* tree.flush(blockstore);
}
-
-module.exports = DirSharded
-
-/**
- * @param {Bucket>} bucket
- * @param {Blockstore} blockstore
- * @param {*} shardRoot
- * @param {ImporterOptions} options
- * @returns {AsyncIterable}
- */
-async function * flush (bucket, blockstore, shardRoot, options) {
- const children = bucket._children
- const links = []
- let childrenSize = 0
-
- for (let i = 0; i < children.length; i++) {
- const child = children.get(i)
-
- if (!child) {
- continue
+async function* treeBuilder(source, block, options) {
+ let tree = new dirFlat({
+ root: true,
+ dir: true,
+ path: '',
+ dirty: true,
+ flat: true
+ }, options);
+ for await (const entry of source) {
+ if (!entry) {
+ continue;
}
-
- const labelPrefix = i.toString(16).toUpperCase().padStart(2, '0')
-
- if (child instanceof Bucket) {
- let shard
-
- for await (const subShard of await flush(child, blockstore, null, options)) {
- shard = subShard
- }
-
- if (!shard) {
- throw new Error('Could not flush sharded directory, no subshard found')
+ tree = await addToTree(entry, tree, options);
+ if (!entry.unixfs || !entry.unixfs.isDirectory()) {
+ yield entry;
+ }
+ }
+ if (options.wrapWithDirectory) {
+ yield* flushAndYield(tree, block);
+ } else {
+ for await (const unwrapped of tree.eachChildSeries()) {
+ if (!unwrapped) {
+ continue;
}
+ yield* flushAndYield(unwrapped.child, block);
+ }
+ }
+}
- links.push({
- Name: labelPrefix,
- Tsize: shard.size,
- Hash: shard.cid
- })
- childrenSize += shard.size
- } else if (typeof child.value.flush === 'function') {
- const dir = child.value
- let flushedDir
+module.exports = treeBuilder;
- for await (const entry of dir.flush(blockstore)) {
- flushedDir = entry
- yield flushedDir
- }
+/***/ }),
- const label = labelPrefix + child.key
- links.push({
- Name: label,
- Tsize: flushedDir.size,
- Hash: flushedDir.cid
- })
+/***/ 8095:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- childrenSize += flushedDir.size
- } else {
- const value = child.value
+"use strict";
- if (!value.cid) {
- continue
- }
- const label = labelPrefix + child.key
- const size = value.size
+var cid = __nccwpck_require__(6447);
+var dagPb = __nccwpck_require__(8012);
+var sha2 = __nccwpck_require__(6987);
- links.push({
- Name: label,
- Tsize: size,
- Hash: value.cid
- })
- childrenSize += size
- }
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () {
+ return e[k];
+ }
+ });
+ }
+ });
}
+ n['default'] = e;
+ return Object.freeze(n);
+}
- // go-ipfs uses little endian, that's why we have to
- // reverse the bit field before storing it
- const data = Uint8Array.from(children.bitField().reverse())
- const dir = new UnixFS({
- type: 'hamt-sharded-directory',
- data,
- fanout: bucket.tableSize(),
- hashType: options.hamtHashCode,
- mtime: shardRoot && shardRoot.mtime,
- mode: shardRoot && shardRoot.mode
- })
+var dagPb__namespace = /*#__PURE__*/_interopNamespace(dagPb);
- const node = {
- Data: dir.marshal(),
- Links: links
+const persist = async (buffer, blockstore, options) => {
+ if (!options.codec) {
+ options.codec = dagPb__namespace;
}
- const buffer = encode(prepare(node))
- const cid = await persist(buffer, blockstore, options)
- const size = buffer.length + childrenSize
-
- yield {
- cid,
- unixfs: dir,
- size
+ if (!options.hasher) {
+ options.hasher = sha2.sha256;
}
-}
+ if (options.cidVersion === undefined) {
+ options.cidVersion = 1;
+ }
+ if (options.codec === dagPb__namespace && options.hasher !== sha2.sha256) {
+ options.cidVersion = 1;
+ }
+ const multihash = await options.hasher.digest(buffer);
+ const cid$1 = cid.CID.create(options.cidVersion, options.codec.code, multihash);
+ if (!options.onlyHash) {
+ await blockstore.put(cid$1, buffer, { signal: options.signal });
+ }
+ return cid$1;
+};
+
+module.exports = persist;
/***/ }),
-/***/ 8245:
+/***/ 4473:
/***/ ((module) => {
"use strict";
-/**
- * @typedef {import('./types').ImporterOptions} ImporterOptions
- * @typedef {import('./types').ImportResult} ImportResult
- * @typedef {import('./types').InProgressImportResult} InProgressImportResult
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('multiformats/cid').CID} CID
- * @typedef {object} DirProps
- * @property {boolean} root
- * @property {boolean} dir
- * @property {string} path
- * @property {boolean} dirty
- * @property {boolean} flat
- * @property {Dir} [parent]
- * @property {string} [parentKey]
- * @property {import('ipfs-unixfs').UnixFS} [unixfs]
- * @property {number} [mode]
- * @property {import('ipfs-unixfs').Mtime} [mtime]
- */
-class Dir {
- /**
- *
- * @param {DirProps} props
- * @param {ImporterOptions} options
- */
- constructor (props, options) {
- this.options = options || {}
-
- this.root = props.root
- this.dir = props.dir
- this.path = props.path
- this.dirty = props.dirty
- this.flat = props.flat
- this.parent = props.parent
- this.parentKey = props.parentKey
- this.unixfs = props.unixfs
- this.mode = props.mode
- this.mtime = props.mtime
-
- /** @type {CID | undefined} */
- this.cid = undefined
- /** @type {number | undefined} */
- this.size = undefined
- }
+const toPathComponents = (path = '') => {
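+ // split on / unless escaped with \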
+ return (path.trim().match(/([^\\^/]|\\\/)+/g) || []).filter(Boolean);
+};
- /**
- * @param {string} name
- * @param {InProgressImportResult | Dir} value
- */
- async put (name, value) { }
+module.exports = toPathComponents;
- /**
- * @param {string} name
- * @returns {Promise}
- */
- get (name) {
- return Promise.resolve(this)
- }
- /**
- * @returns {AsyncIterable<{ key: string, child: InProgressImportResult | Dir}>}
- */
- async * eachChildSeries () { }
+/***/ }),
- /**
- * @param {Blockstore} blockstore
- * @returns {AsyncIterable}
- */
- async * flush (blockstore) { }
-}
+/***/ 8386:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-module.exports = Dir
+"use strict";
-/***/ }),
+const { Buffer } = __nccwpck_require__(4293)
+const symbol = Symbol.for('BufferList')
-/***/ 58:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+function BufferList (buf) {
+ if (!(this instanceof BufferList)) {
+ return new BufferList(buf)
+ }
-"use strict";
+ BufferList._init.call(this, buf)
+}
+BufferList._init = function _init (buf) {
+ Object.defineProperty(this, symbol, { value: true })
-const DirSharded = __nccwpck_require__(1742)
-const DirFlat = __nccwpck_require__(1607)
+ this._bufs = []
+ this.length = 0
-/**
- * @typedef {import('./dir')} Dir
- * @typedef {import('./types').ImporterOptions} ImporterOptions
- */
+ if (buf) {
+ this.append(buf)
+ }
+}
-/**
- * @param {Dir | null} child
- * @param {Dir} dir
- * @param {number} threshold
- * @param {ImporterOptions} options
- * @returns {Promise}
- */
-module.exports = async function flatToShard (child, dir, threshold, options) {
- let newDir = dir
+BufferList.prototype._new = function _new (buf) {
+ return new BufferList(buf)
+}
- if (dir instanceof DirFlat && dir.directChildrenCount() >= threshold) {
- newDir = await convertToShard(dir, options)
+BufferList.prototype._offset = function _offset (offset) {
+ if (offset === 0) {
+ return [0, 0]
}
- const parent = newDir.parent
-
- if (parent) {
- if (newDir !== dir) {
- if (child) {
- child.parent = newDir
- }
-
- if (!newDir.parentKey) {
- throw new Error('No parent key found')
- }
+ let tot = 0
- await parent.put(newDir.parentKey, newDir)
+ for (let i = 0; i < this._bufs.length; i++) {
+ const _t = tot + this._bufs[i].length
+ if (offset < _t || i === this._bufs.length - 1) {
+ return [i, offset - tot]
}
-
- return flatToShard(newDir, parent, threshold, options)
+ tot = _t
}
-
- // @ts-ignore
- return newDir
}
-/**
- * @param {DirFlat} oldDir
- * @param {ImporterOptions} options
- */
-async function convertToShard (oldDir, options) {
- const newDir = new DirSharded({
- root: oldDir.root,
- dir: true,
- parent: oldDir.parent,
- parentKey: oldDir.parentKey,
- path: oldDir.path,
- dirty: oldDir.dirty,
- flat: false,
- mtime: oldDir.mtime,
- mode: oldDir.mode
- }, options)
+BufferList.prototype._reverseOffset = function (blOffset) {
+ const bufferId = blOffset[0]
+ let offset = blOffset[1]
- for await (const { key, child } of oldDir.eachChildSeries()) {
- await newDir.put(key, child)
+ for (let i = 0; i < bufferId; i++) {
+ offset += this._bufs[i].length
}
- return newDir
+ return offset
}
+BufferList.prototype.get = function get (index) {
+ if (index > this.length || index < 0) {
+ return undefined
+ }
-/***/ }),
-
-/***/ 1333:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
+ const offset = this._offset(index)
+ return this._bufs[offset[0]][offset[1]]
+}
-const parallelBatch = __nccwpck_require__(6615)
-const defaultOptions = __nccwpck_require__(1379)
+BufferList.prototype.slice = function slice (start, end) {
+ if (typeof start === 'number' && start < 0) {
+ start += this.length
+ }
-/**
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {import('./types').ImportCandidate} ImportCandidate
- * @typedef {import('./types').UserImporterOptions} UserImporterOptions
- * @typedef {import('./types').ImporterOptions} ImporterOptions
- * @typedef {import('./types').Directory} Directory
- * @typedef {import('./types').File} File
- * @typedef {import('./types').ImportResult} ImportResult
- *
- * @typedef {import('./types').Chunker} Chunker
- * @typedef {import('./types').DAGBuilder} DAGBuilder
- * @typedef {import('./types').TreeBuilder} TreeBuilder
- * @typedef {import('./types').BufferImporter} BufferImporter
- * @typedef {import('./types').ChunkValidator} ChunkValidator
- * @typedef {import('./types').Reducer} Reducer
- * @typedef {import('./types').ProgressHandler} ProgressHandler
- */
+ if (typeof end === 'number' && end < 0) {
+ end += this.length
+ }
-/**
- * @param {AsyncIterable | Iterable | ImportCandidate} source
- * @param {Blockstore} blockstore
- * @param {UserImporterOptions} options
- */
-async function * importer (source, blockstore, options = {}) {
- const opts = defaultOptions(options)
+ return this.copy(null, 0, start, end)
+}
- let dagBuilder
+BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
+ if (typeof srcStart !== 'number' || srcStart < 0) {
+ srcStart = 0
+ }
- if (typeof options.dagBuilder === 'function') {
- dagBuilder = options.dagBuilder
- } else {
- dagBuilder = __nccwpck_require__(5475)
+ if (typeof srcEnd !== 'number' || srcEnd > this.length) {
+ srcEnd = this.length
}
- let treeBuilder
+ if (srcStart >= this.length) {
+ return dst || Buffer.alloc(0)
+ }
- if (typeof options.treeBuilder === 'function') {
- treeBuilder = options.treeBuilder
- } else {
- treeBuilder = __nccwpck_require__(722)
+ if (srcEnd <= 0) {
+ return dst || Buffer.alloc(0)
}
- /** @type {AsyncIterable | Iterable} */
- let candidates
+ const copy = !!dst
+ const off = this._offset(srcStart)
+ const len = srcEnd - srcStart
+ let bytes = len
+ let bufoff = (copy && dstStart) || 0
+ let start = off[1]
- if (Symbol.asyncIterator in source || Symbol.iterator in source) {
- // @ts-ignore
- candidates = source
- } else {
- // @ts-ignore
- candidates = [source]
- }
+ // copy/slice everything
+ if (srcStart === 0 && srcEnd === this.length) {
+ if (!copy) {
+ // slice, but full concat if multiple buffers
+ return this._bufs.length === 1
+ ? this._bufs[0]
+ : Buffer.concat(this._bufs, this.length)
+ }
- for await (const entry of treeBuilder(parallelBatch(dagBuilder(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) {
- yield {
- cid: entry.cid,
- path: entry.path,
- unixfs: entry.unixfs,
- size: entry.size
+ // copy, need to copy individual buffers
+ for (let i = 0; i < this._bufs.length; i++) {
+ this._bufs[i].copy(dst, bufoff)
+ bufoff += this._bufs[i].length
}
+
+ return dst
}
-}
-module.exports = {
- importer
-}
+ // easy, cheap case where it's a subset of one of the buffers
+ if (bytes <= this._bufs[off[0]].length - start) {
+ return copy
+ ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
+ : this._bufs[off[0]].slice(start, start + bytes)
+ }
+ if (!copy) {
+ // a slice, we need something to copy in to
+ dst = Buffer.allocUnsafe(len)
+ }
-/***/ }),
+ for (let i = off[0]; i < this._bufs.length; i++) {
+ const l = this._bufs[i].length - start
-/***/ 1379:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ if (bytes > l) {
+ this._bufs[i].copy(dst, bufoff, start)
+ bufoff += l
+ } else {
+ this._bufs[i].copy(dst, bufoff, start, start + bytes)
+ bufoff += l
+ break
+ }
-"use strict";
+ bytes -= l
+ if (start) {
+ start = 0
+ }
+ }
-const mergeOptions = __nccwpck_require__(2555).bind({ ignoreUndefined: true })
-const { sha256 } = __nccwpck_require__(6987)
-// @ts-ignore - no types available
-const mur = __nccwpck_require__(7214)
-const uint8ArrayFromString = __nccwpck_require__(828)
+ // safeguard so that we don't return uninitialized memory
+ if (dst.length > bufoff) return dst.slice(0, bufoff)
-/**
- * @param {Uint8Array} buf
- */
-async function hamtHashFn (buf) {
- return uint8ArrayFromString(mur.x64.hash128(buf), 'base16')
- // Murmur3 outputs 128 bit but, accidentally, IPFS Go's
- // implementation only uses the first 64, so we must do the same
- // for parity..
- .slice(0, 8)
- // Invert buffer because that's how Go impl does it
- .reverse()
+ return dst
}
-/**
- * @typedef {import('./types').UserImporterOptions} UserImporterOptions
- * @typedef {import('./types').ImporterOptions} ImporterOptions
- */
-
-/**
- * @type {ImporterOptions}
- */
-const defaultOptions = {
- chunker: 'fixed',
- strategy: 'balanced', // 'flat', 'trickle'
- rawLeaves: false,
- onlyHash: false,
- reduceSingleLeafToSelf: true,
- hasher: sha256,
- leafType: 'file', // 'raw'
- cidVersion: 0,
- progress: () => () => {},
- shardSplitThreshold: 1000,
- fileImportConcurrency: 50,
- blockWriteConcurrency: 10,
- minChunkSize: 262144,
- maxChunkSize: 262144,
- avgChunkSize: 262144,
- window: 16,
- // FIXME: This number is too big for JavaScript
- // https://github.com/ipfs/go-ipfs-chunker/blob/d0125832512163708c0804a3cda060e21acddae4/rabin.go#L11
- polynomial: 17437180132763653, // eslint-disable-line no-loss-of-precision
- maxChildrenPerNode: 174,
- layerRepeat: 4,
- wrapWithDirectory: false,
- recursive: false,
- hidden: false,
- timeout: undefined,
- hamtHashFn,
- hamtHashCode: 0x22,
- hamtBucketBits: 8
-}
+BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
+ start = start || 0
+ end = typeof end !== 'number' ? this.length : end
-/**
- * @param {UserImporterOptions} options
- * @returns {ImporterOptions}
- */
-module.exports = function (options = {}) {
- return mergeOptions(defaultOptions, options)
-}
+ if (start < 0) {
+ start += this.length
+ }
+ if (end < 0) {
+ end += this.length
+ }
-/***/ }),
+ if (start === end) {
+ return this._new()
+ }
-/***/ 722:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ const startOffset = this._offset(start)
+ const endOffset = this._offset(end)
+ const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
-"use strict";
+ if (endOffset[1] === 0) {
+ buffers.pop()
+ } else {
+ buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1])
+ }
+ if (startOffset[1] !== 0) {
+ buffers[0] = buffers[0].slice(startOffset[1])
+ }
-const DirFlat = __nccwpck_require__(1607)
-const flatToShard = __nccwpck_require__(58)
-const Dir = __nccwpck_require__(8245)
-const toPathComponents = __nccwpck_require__(6301)
+ return this._new(buffers)
+}
-/**
- * @typedef {import('./types').ImportResult} ImportResult
- * @typedef {import('./types').InProgressImportResult} InProgressImportResult
- * @typedef {import('./types').ImporterOptions} ImporterOptions
- * @typedef {import('interface-blockstore').Blockstore} Blockstore
- * @typedef {(source: AsyncIterable, blockstore: Blockstore, options: ImporterOptions) => AsyncIterable} TreeBuilder
- */
+BufferList.prototype.toString = function toString (encoding, start, end) {
+ return this.slice(start, end).toString(encoding)
+}
-/**
- * @param {InProgressImportResult} elem
- * @param {Dir} tree
- * @param {ImporterOptions} options
- */
-async function addToTree (elem, tree, options) {
- const pathElems = toPathComponents(elem.path || '')
- const lastIndex = pathElems.length - 1
- let parent = tree
- let currentPath = ''
+BufferList.prototype.consume = function consume (bytes) {
+ // first, normalize the argument, in accordance with how Buffer does it
+ bytes = Math.trunc(bytes)
+ // do nothing if not a positive number
+ if (Number.isNaN(bytes) || bytes <= 0) return this
- for (let i = 0; i < pathElems.length; i++) {
- const pathElem = pathElems[i]
+ while (this._bufs.length) {
+ if (bytes >= this._bufs[0].length) {
+ bytes -= this._bufs[0].length
+ this.length -= this._bufs[0].length
+ this._bufs.shift()
+ } else {
+ this._bufs[0] = this._bufs[0].slice(bytes)
+ this.length -= bytes
+ break
+ }
+ }
- currentPath += `${currentPath ? '/' : ''}${pathElem}`
+ return this
+}
- const last = (i === lastIndex)
- parent.dirty = true
- parent.cid = undefined
- parent.size = undefined
+BufferList.prototype.duplicate = function duplicate () {
+ const copy = this._new()
- if (last) {
- await parent.put(pathElem, elem)
- tree = await flatToShard(null, parent, options.shardSplitThreshold, options)
- } else {
- let dir = await parent.get(pathElem)
+ for (let i = 0; i < this._bufs.length; i++) {
+ copy.append(this._bufs[i])
+ }
- if (!dir || !(dir instanceof Dir)) {
- dir = new DirFlat({
- root: false,
- dir: true,
- parent: parent,
- parentKey: pathElem,
- path: currentPath,
- dirty: true,
- flat: true,
- mtime: dir && dir.unixfs && dir.unixfs.mtime,
- mode: dir && dir.unixfs && dir.unixfs.mode
- }, options)
- }
+ return copy
+}
- await parent.put(pathElem, dir)
+BufferList.prototype.append = function append (buf) {
+ if (buf == null) {
+ return this
+ }
- parent = dir
+ if (buf.buffer) {
+ // append a view of the underlying ArrayBuffer
+ this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength))
+ } else if (Array.isArray(buf)) {
+ for (let i = 0; i < buf.length; i++) {
+ this.append(buf[i])
+ }
+ } else if (this._isBufferList(buf)) {
+ // unwrap argument into individual BufferLists
+ for (let i = 0; i < buf._bufs.length; i++) {
+ this.append(buf._bufs[i])
+ }
+ } else {
+ // coerce number arguments to strings, since Buffer(number) does
+ // uninitialized memory allocation
+ if (typeof buf === 'number') {
+ buf = buf.toString()
}
+
+ this._appendBuffer(Buffer.from(buf))
}
- return tree
+ return this
}
-/**
- * @param {Dir | InProgressImportResult} tree
- * @param {Blockstore} blockstore
- */
-async function * flushAndYield (tree, blockstore) {
- if (!(tree instanceof Dir)) {
- if (tree && tree.unixfs && tree.unixfs.isDirectory()) {
- yield tree
- }
+BufferList.prototype._appendBuffer = function appendBuffer (buf) {
+ this._bufs.push(buf)
+ this.length += buf.length
+}
- return
+BufferList.prototype.indexOf = function (search, offset, encoding) {
+ if (encoding === undefined && typeof offset === 'string') {
+ encoding = offset
+ offset = undefined
}
- yield * tree.flush(blockstore)
-}
-
-/**
- * @type {TreeBuilder}
- */
-async function * treeBuilder (source, block, options) {
- /** @type {Dir} */
- let tree = new DirFlat({
- root: true,
- dir: true,
- path: '',
- dirty: true,
- flat: true
- }, options)
+ if (typeof search === 'function' || Array.isArray(search)) {
+ throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
+ } else if (typeof search === 'number') {
+ search = Buffer.from([search])
+ } else if (typeof search === 'string') {
+ search = Buffer.from(search, encoding)
+ } else if (this._isBufferList(search)) {
+ search = search.slice()
+ } else if (Array.isArray(search.buffer)) {
+ search = Buffer.from(search.buffer, search.byteOffset, search.byteLength)
+ } else if (!Buffer.isBuffer(search)) {
+ search = Buffer.from(search)
+ }
- for await (const entry of source) {
- if (!entry) {
- continue
- }
+ offset = Number(offset || 0)
- tree = await addToTree(entry, tree, options)
+ if (isNaN(offset)) {
+ offset = 0
+ }
- if (!entry.unixfs || !entry.unixfs.isDirectory()) {
- yield entry
- }
+ if (offset < 0) {
+ offset = this.length + offset
}
- if (options.wrapWithDirectory) {
- yield * flushAndYield(tree, block)
- } else {
- for await (const unwrapped of tree.eachChildSeries()) {
- if (!unwrapped) {
- continue
- }
+ if (offset < 0) {
+ offset = 0
+ }
- yield * flushAndYield(unwrapped.child, block)
- }
+ if (search.length === 0) {
+ return offset > this.length ? this.length : offset
}
-}
-module.exports = treeBuilder
+ const blOffset = this._offset(offset)
+ let blIndex = blOffset[0] // index of which internal buffer we're working on
+ let buffOffset = blOffset[1] // offset of the internal buffer we're working on
+ // scan over each buffer
+ for (; blIndex < this._bufs.length; blIndex++) {
+ const buff = this._bufs[blIndex]
-/***/ }),
+ while (buffOffset < buff.length) {
+ const availableWindow = buff.length - buffOffset
-/***/ 8567:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ if (availableWindow >= search.length) {
+ const nativeSearchResult = buff.indexOf(search, buffOffset)
-"use strict";
+ if (nativeSearchResult !== -1) {
+ return this._reverseOffset([blIndex, nativeSearchResult])
+ }
+ buffOffset = buff.length - search.length + 1 // end of native search window
+ } else {
+ const revOffset = this._reverseOffset([blIndex, buffOffset])
-const { CID } = __nccwpck_require__(6447)
-const dagPb = __nccwpck_require__(8012)
-const { sha256 } = __nccwpck_require__(6987)
+ if (this._match(revOffset, search)) {
+ return revOffset
+ }
-/**
- * @param {Uint8Array} buffer
- * @param {import('interface-blockstore').Blockstore} blockstore
- * @param {import('../types').PersistOptions} options
- */
-const persist = async (buffer, blockstore, options) => {
- if (!options.codec) {
- options.codec = dagPb
- }
+ buffOffset++
+ }
+ }
- if (!options.hasher) {
- options.hasher = sha256
+ buffOffset = 0
}
- if (options.cidVersion === undefined) {
- options.cidVersion = 1
- }
+ return -1
+}
- if (options.codec === dagPb && options.hasher !== sha256) {
- options.cidVersion = 1
+BufferList.prototype._match = function (offset, search) {
+ if (this.length - offset < search.length) {
+ return false
}
- const multihash = await options.hasher.digest(buffer)
- const cid = CID.create(options.cidVersion, options.codec.code, multihash)
-
- if (!options.onlyHash) {
- await blockstore.put(cid, buffer, {
- signal: options.signal
- })
+ for (let searchOffset = 0; searchOffset < search.length; searchOffset++) {
+ if (this.get(offset + searchOffset) !== search[searchOffset]) {
+ return false
+ }
}
-
- return cid
+ return true
}
-module.exports = persist
-
-
-/***/ }),
-
-/***/ 6301:
-/***/ ((module) => {
+;(function () {
+ const methods = {
+ readDoubleBE: 8,
+ readDoubleLE: 8,
+ readFloatBE: 4,
+ readFloatLE: 4,
+ readInt32BE: 4,
+ readInt32LE: 4,
+ readUInt32BE: 4,
+ readUInt32LE: 4,
+ readInt16BE: 2,
+ readInt16LE: 2,
+ readUInt16BE: 2,
+ readUInt16LE: 2,
+ readInt8: 1,
+ readUInt8: 1,
+ readIntBE: null,
+ readIntLE: null,
+ readUIntBE: null,
+ readUIntLE: null
+ }
-"use strict";
+ for (const m in methods) {
+ (function (m) {
+ if (methods[m] === null) {
+ BufferList.prototype[m] = function (offset, byteLength) {
+ return this.slice(offset, offset + byteLength)[m](0, byteLength)
+ }
+ } else {
+ BufferList.prototype[m] = function (offset = 0) {
+ return this.slice(offset, offset + methods[m])[m](0)
+ }
+ }
+ }(m))
+ }
+}())
+// Used internally by the class and also as an indicator of this object being
+// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser
+// environment because there could be multiple different copies of the
+// BufferList class and some `BufferList`s might be `BufferList`s even though
+// they aren't instances of this copy (within given context).
+BufferList.prototype._isBufferList = function _isBufferList (b) {
+ return b instanceof BufferList || BufferList.isBufferList(b)
+}
-const toPathComponents = (path = '') => {
- // split on / unless escaped with \
- return (path
- .trim()
- .match(/([^\\^/]|\\\/)+/g) || [])
- .filter(Boolean)
+BufferList.isBufferList = function isBufferList (b) {
+ return b != null && b[symbol]
}
-module.exports = toPathComponents
+module.exports = BufferList
/***/ }),
-/***/ 9811:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+/***/ 4103:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-const {
- Data: PBData
-} = __nccwpck_require__(8699)
-const errcode = __nccwpck_require__(2997)
+Object.defineProperty(exports, "__esModule", ({ value: true }));
-/**
- * @typedef {import('./types').Mtime} Mtime
- * @typedef {import('./types').MtimeLike} MtimeLike
- */
+var errcode = __nccwpck_require__(2997);
+var unixfs = __nccwpck_require__(3385);
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+
+var errcode__default = /*#__PURE__*/_interopDefaultLegacy(errcode);
+
+const PBData = unixfs.Data;
const types = [
'raw',
'directory',
@@ -13211,304 +11263,189 @@ const types = [
'metadata',
'symlink',
'hamt-sharded-directory'
-]
-
+];
const dirTypes = [
'directory',
'hamt-sharded-directory'
-]
-
-const DEFAULT_FILE_MODE = parseInt('0644', 8)
-const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8)
-
-/**
- * @param {string | number | undefined} [mode]
- */
-function parseMode (mode) {
+];
+const DEFAULT_FILE_MODE = parseInt('0644', 8);
+const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8);
+function parseMode(mode) {
if (mode == null) {
- return undefined
+ return undefined;
}
-
if (typeof mode === 'number') {
- return mode & 0xFFF
+ return mode & 4095;
}
-
- mode = mode.toString()
-
+ mode = mode.toString();
if (mode.substring(0, 1) === '0') {
- // octal string
- return parseInt(mode, 8) & 0xFFF
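+ // octal string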
+ return parseInt(mode, 8) & 4095;
}
-
- // decimal string
- return parseInt(mode, 10) & 0xFFF
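+ // decimal string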
+ return parseInt(mode, 10) & 4095;
}
-
-/**
- * @param {any} input
- */
-function parseMtime (input) {
+function parseMtime(input) {
if (input == null) {
- return undefined
+ return undefined;
}
-
- /** @type {Mtime | undefined} */
- let mtime
-
- // { secs, nsecs }
+ let mtime;
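+ // { secs, nsecs }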
if (input.secs != null) {
mtime = {
secs: input.secs,
nsecs: input.nsecs
- }
+ };
}
-
- // UnixFS TimeSpec
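+ // UnixFS TimeSpec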
if (input.Seconds != null) {
mtime = {
secs: input.Seconds,
nsecs: input.FractionalNanoseconds
- }
+ };
}
-
- // process.hrtime()
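+ // process.hrtime()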
if (Array.isArray(input)) {
mtime = {
secs: input[0],
nsecs: input[1]
- }
+ };
}
-
- // Javascript Date
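+ // Javascript Date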
if (input instanceof Date) {
- const ms = input.getTime()
- const secs = Math.floor(ms / 1000)
-
+ const ms = input.getTime();
+ const secs = Math.floor(ms / 1000);
mtime = {
secs: secs,
- nsecs: (ms - (secs * 1000)) * 1000
- }
- }
-
- /*
- TODO: https://github.com/ipfs/aegir/issues/487
-
- // process.hrtime.bigint()
- if (input instanceof BigInt) {
- const secs = input / BigInt(1e9)
- const nsecs = input - (secs * BigInt(1e9))
-
- mtime = {
- secs: parseInt(secs.toString()),
- nsecs: parseInt(nsecs.toString())
- }
+ nsecs: (ms - secs * 1000) * 1000
+ };
}
- */
-
if (!Object.prototype.hasOwnProperty.call(mtime, 'secs')) {
- return undefined
+ return undefined;
}
-
if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) {
- throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS')
+ throw errcode__default['default'](new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS');
}
-
- return mtime
+ return mtime;
}
-
-class Data {
- /**
- * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md
- *
- * @param {Uint8Array} marshaled
- */
- static unmarshal (marshaled) {
- const message = PBData.decode(marshaled)
+class UnixFS {
+ static unmarshal(marshaled) {
+ const message = PBData.decode(marshaled);
const decoded = PBData.toObject(message, {
defaults: false,
arrays: true,
longs: Number,
objects: false
- })
-
- const data = new Data({
+ });
+ const data = new UnixFS({
type: types[decoded.Type],
data: decoded.Data,
blockSizes: decoded.blocksizes,
mode: decoded.mode,
- mtime: decoded.mtime
- ? {
- secs: decoded.mtime.Seconds,
- nsecs: decoded.mtime.FractionalNanoseconds
- }
- : undefined
- })
-
- // make sure we honour the original mode
- data._originalMode = decoded.mode || 0
-
- return data
+ mtime: decoded.mtime ? {
+ secs: decoded.mtime.Seconds,
+ nsecs: decoded.mtime.FractionalNanoseconds
+ } : undefined
+ });
+ data._originalMode = decoded.mode || 0;
+ return data;
}
-
- /**
- * @param {object} [options]
- * @param {string} [options.type='file']
- * @param {Uint8Array} [options.data]
- * @param {number[]} [options.blockSizes]
- * @param {number} [options.hashType]
- * @param {number} [options.fanout]
- * @param {MtimeLike | null} [options.mtime]
- * @param {number | string} [options.mode]
- */
- constructor (options = {
- type: 'file'
- }) {
- const {
- type,
- data,
- blockSizes,
- hashType,
- fanout,
- mtime,
- mode
- } = options
-
+ constructor(options = { type: 'file' }) {
+ const {type, data, blockSizes, hashType, fanout, mtime, mode} = options;
if (type && !types.includes(type)) {
- throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')
+ throw errcode__default['default'](new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE');
}
-
- this.type = type || 'file'
- this.data = data
- this.hashType = hashType
- this.fanout = fanout
-
- /** @type {number[]} */
- this.blockSizes = blockSizes || []
- this._originalMode = 0
- this.mode = parseMode(mode)
-
+ this.type = type || 'file';
+ this.data = data;
+ this.hashType = hashType;
+ this.fanout = fanout;
+ this.blockSizes = blockSizes || [];
+ this._originalMode = 0;
+ this.mode = parseMode(mode);
if (mtime) {
- this.mtime = parseMtime(mtime)
-
+ this.mtime = parseMtime(mtime);
if (this.mtime && !this.mtime.nsecs) {
- this.mtime.nsecs = 0
+ this.mtime.nsecs = 0;
}
}
}
-
- /**
- * @param {number | undefined} mode
- */
- set mode (mode) {
- this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE
-
- const parsedMode = parseMode(mode)
-
+ set mode(mode) {
+ this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE;
+ const parsedMode = parseMode(mode);
if (parsedMode !== undefined) {
- this._mode = parsedMode
+ this._mode = parsedMode;
}
}
-
- /**
- * @returns {number | undefined}
- */
- get mode () {
- return this._mode
+ get mode() {
+ return this._mode;
}
-
- isDirectory () {
- return Boolean(this.type && dirTypes.includes(this.type))
+ isDirectory() {
+ return Boolean(this.type && dirTypes.includes(this.type));
}
-
- /**
- * @param {number} size
- */
- addBlockSize (size) {
- this.blockSizes.push(size)
+ addBlockSize(size) {
+ this.blockSizes.push(size);
}
-
- /**
- * @param {number} index
- */
- removeBlockSize (index) {
- this.blockSizes.splice(index, 1)
+ removeBlockSize(index) {
+ this.blockSizes.splice(index, 1);
}
-
- /**
- * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else
- */
- fileSize () {
+ fileSize() {
if (this.isDirectory()) {
- // dirs don't have file size
- return 0
+ return 0;
}
-
- let sum = 0
- this.blockSizes.forEach((size) => {
- sum += size
- })
-
+ let sum = 0;
+ this.blockSizes.forEach(size => {
+ sum += size;
+ });
if (this.data) {
- sum += this.data.length
+ sum += this.data.length;
}
-
- return sum
+ return sum;
}
-
- /**
- * encode to protobuf Uint8Array
- */
- marshal () {
- let type
-
+ marshal() {
+ let type;
switch (this.type) {
- case 'raw': type = PBData.DataType.Raw; break
- case 'directory': type = PBData.DataType.Directory; break
- case 'file': type = PBData.DataType.File; break
- case 'metadata': type = PBData.DataType.Metadata; break
- case 'symlink': type = PBData.DataType.Symlink; break
- case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break
- default:
- throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')
+ case 'raw':
+ type = PBData.DataType.Raw;
+ break;
+ case 'directory':
+ type = PBData.DataType.Directory;
+ break;
+ case 'file':
+ type = PBData.DataType.File;
+ break;
+ case 'metadata':
+ type = PBData.DataType.Metadata;
+ break;
+ case 'symlink':
+ type = PBData.DataType.Symlink;
+ break;
+ case 'hamt-sharded-directory':
+ type = PBData.DataType.HAMTShard;
+ break;
+ default:
+ throw errcode__default['default'](new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE');
}
-
- let data = this.data
-
+ let data = this.data;
if (!this.data || !this.data.length) {
- data = undefined
+ data = undefined;
}
-
- let mode
-
+ let mode;
if (this.mode != null) {
- mode = (this._originalMode & 0xFFFFF000) | (parseMode(this.mode) || 0)
-
+ mode = this._originalMode & 4294963200 | (parseMode(this.mode) || 0);
if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {
- mode = undefined
+ mode = undefined;
}
-
if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {
- mode = undefined
+ mode = undefined;
}
}
-
- let mtime
-
+ let mtime;
if (this.mtime != null) {
- const parsed = parseMtime(this.mtime)
-
+ const parsed = parseMtime(this.mtime);
if (parsed) {
mtime = {
Seconds: parsed.secs,
FractionalNanoseconds: parsed.nsecs
- }
-
+ };
if (mtime.FractionalNanoseconds === 0) {
- delete mtime.FractionalNanoseconds
+ delete mtime.FractionalNanoseconds;
}
}
}
-
const pbData = {
Type: type,
Data: data,
@@ -13518,745 +11455,449 @@ class Data {
fanout: this.fanout,
mode,
mtime
- }
-
- return PBData.encode(pbData).finish()
+ };
+ return PBData.encode(pbData).finish();
}
}
-module.exports = {
- UnixFS: Data,
- parseMode,
- parseMtime
-}
+exports.UnixFS = UnixFS;
+exports.parseMode = parseMode;
+exports.parseMtime = parseMtime;
/***/ }),
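// Example (minimal sketch) of how the UnixFS class above is consumed. It
// assumes the published package name 'ipfs-unixfs' (the bundler id 4103 is
// internal) and illustrative values:
//
//   const { UnixFS } = require('ipfs-unixfs')
//
//   const file = new UnixFS({ type: 'file', mode: '0644', mtime: new Date() })
//   file.addBlockSize(262144)          // one 256 KiB chunk
//   file.fileSize()                    // => 262144 (data.length + sum(blockSizes))
//
//   const bytes = file.marshal()       // protobuf-encoded Uint8Array
//   const copy = UnixFS.unmarshal(bytes)
//   copy.isDirectory()                 // => false
//   copy.mode.toString(8)              // => '644' (the default file mode is re-applied)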
-/***/ 8699:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+/***/ 3385:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-/*eslint-disable*/
-
-var $protobuf = __nccwpck_require__(6916);
-
-// Common aliases
-var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
-// Exported root namespace
-var $root = $protobuf.roots["ipfs-unixfs"] || ($protobuf.roots["ipfs-unixfs"] = {});
+Object.defineProperty(exports, "__esModule", ({ value: true }));
-$root.Data = (function() {
+var $protobuf = __nccwpck_require__(6916);
- /**
- * Properties of a Data.
- * @exports IData
- * @interface IData
- * @property {Data.DataType} Type Data Type
- * @property {Uint8Array|null} [Data] Data Data
- * @property {number|null} [filesize] Data filesize
- * @property {Array.<number>|null} [blocksizes] Data blocksizes
- * @property {number|null} [hashType] Data hashType
- * @property {number|null} [fanout] Data fanout
- * @property {number|null} [mode] Data mode
- * @property {IUnixTime|null} [mtime] Data mtime
- */
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
- /**
- * Constructs a new Data.
- * @exports Data
- * @classdesc Represents a Data.
- * @implements IData
- * @constructor
- * @param {IData=} [p] Properties to set
- */
- function Data(p) {
- this.blocksizes = [];
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
+var $protobuf__default = /*#__PURE__*/_interopDefaultLegacy($protobuf);
+
+const $Reader = $protobuf__default['default'].Reader, $Writer = $protobuf__default['default'].Writer, $util = $protobuf__default['default'].util;
+const $root = $protobuf__default['default'].roots['ipfs-unixfs'] || ($protobuf__default['default'].roots['ipfs-unixfs'] = {});
+const Data = $root.Data = (() => {
+ function Data(p) {
+ this.blocksizes = [];
+ if (p)
+ for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
+ if (p[ks[i]] != null)
+ this[ks[i]] = p[ks[i]];
+ }
+ Data.prototype.Type = 0;
+ Data.prototype.Data = $util.newBuffer([]);
+ Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0, 0, true) : 0;
+ Data.prototype.blocksizes = $util.emptyArray;
+ Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0, 0, true) : 0;
+ Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0, 0, true) : 0;
+ Data.prototype.mode = 0;
+ Data.prototype.mtime = null;
+ Data.encode = function encode(m, w) {
+ if (!w)
+ w = $Writer.create();
+ w.uint32(8).int32(m.Type);
+ if (m.Data != null && Object.hasOwnProperty.call(m, 'Data'))
+ w.uint32(18).bytes(m.Data);
+ if (m.filesize != null && Object.hasOwnProperty.call(m, 'filesize'))
+ w.uint32(24).uint64(m.filesize);
+ if (m.blocksizes != null && m.blocksizes.length) {
+ for (var i = 0; i < m.blocksizes.length; ++i)
+ w.uint32(32).uint64(m.blocksizes[i]);
+ }
+ if (m.hashType != null && Object.hasOwnProperty.call(m, 'hashType'))
+ w.uint32(40).uint64(m.hashType);
+ if (m.fanout != null && Object.hasOwnProperty.call(m, 'fanout'))
+ w.uint32(48).uint64(m.fanout);
+ if (m.mode != null && Object.hasOwnProperty.call(m, 'mode'))
+ w.uint32(56).uint32(m.mode);
+ if (m.mtime != null && Object.hasOwnProperty.call(m, 'mtime'))
+ $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim();
+ return w;
+ };
+ Data.decode = function decode(r, l) {
+ if (!(r instanceof $Reader))
+ r = $Reader.create(r);
+ var c = l === undefined ? r.len : r.pos + l, m = new $root.Data();
+ while (r.pos < c) {
+ var t = r.uint32();
+ switch (t >>> 3) {
+ case 1:
+ m.Type = r.int32();
+ break;
+ case 2:
+ m.Data = r.bytes();
+ break;
+ case 3:
+ m.filesize = r.uint64();
+ break;
+ case 4:
+ if (!(m.blocksizes && m.blocksizes.length))
+ m.blocksizes = [];
+ if ((t & 7) === 2) {
+ var c2 = r.uint32() + r.pos;
+ while (r.pos < c2)
+ m.blocksizes.push(r.uint64());
+ } else
+ m.blocksizes.push(r.uint64());
+ break;
+ case 5:
+ m.hashType = r.uint64();
+ break;
+ case 6:
+ m.fanout = r.uint64();
+ break;
+ case 7:
+ m.mode = r.uint32();
+ break;
+ case 8:
+ m.mtime = $root.UnixTime.decode(r, r.uint32());
+ break;
+ default:
+ r.skipType(t & 7);
+ break;
+ }
}
-
- /**
- * Data Type.
- * @member {Data.DataType} Type
- * @memberof Data
- * @instance
- */
- Data.prototype.Type = 0;
-
- /**
- * Data Data.
- * @member {Uint8Array} Data
- * @memberof Data
- * @instance
- */
- Data.prototype.Data = $util.newBuffer([]);
-
- /**
- * Data filesize.
- * @member {number} filesize
- * @memberof Data
- * @instance
- */
- Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
-
- /**
- * Data blocksizes.
- * @member {Array.<number>} blocksizes
- * @memberof Data
- * @instance
- */
- Data.prototype.blocksizes = $util.emptyArray;
-
- /**
- * Data hashType.
- * @member {number} hashType
- * @memberof Data
- * @instance
- */
- Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
-
- /**
- * Data fanout.
- * @member {number} fanout
- * @memberof Data
- * @instance
- */
- Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
-
- /**
- * Data mode.
- * @member {number} mode
- * @memberof Data
- * @instance
- */
- Data.prototype.mode = 0;
-
- /**
- * Data mtime.
- * @member {IUnixTime|null|undefined} mtime
- * @memberof Data
- * @instance
- */
- Data.prototype.mtime = null;
-
- /**
- * Encodes the specified Data message. Does not implicitly {@link Data.verify|verify} messages.
- * @function encode
- * @memberof Data
- * @static
- * @param {IData} m Data message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- Data.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- w.uint32(8).int32(m.Type);
- if (m.Data != null && Object.hasOwnProperty.call(m, "Data"))
- w.uint32(18).bytes(m.Data);
- if (m.filesize != null && Object.hasOwnProperty.call(m, "filesize"))
- w.uint32(24).uint64(m.filesize);
- if (m.blocksizes != null && m.blocksizes.length) {
- for (var i = 0; i < m.blocksizes.length; ++i)
- w.uint32(32).uint64(m.blocksizes[i]);
- }
- if (m.hashType != null && Object.hasOwnProperty.call(m, "hashType"))
- w.uint32(40).uint64(m.hashType);
- if (m.fanout != null && Object.hasOwnProperty.call(m, "fanout"))
- w.uint32(48).uint64(m.fanout);
- if (m.mode != null && Object.hasOwnProperty.call(m, "mode"))
- w.uint32(56).uint32(m.mode);
- if (m.mtime != null && Object.hasOwnProperty.call(m, "mtime"))
- $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim();
- return w;
- };
-
- /**
- * Decodes a Data message from the specified reader or buffer.
- * @function decode
- * @memberof Data
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {Data} Data
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- Data.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.Data();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.Type = r.int32();
- break;
- case 2:
- m.Data = r.bytes();
- break;
- case 3:
- m.filesize = r.uint64();
- break;
- case 4:
- if (!(m.blocksizes && m.blocksizes.length))
- m.blocksizes = [];
- if ((t & 7) === 2) {
- var c2 = r.uint32() + r.pos;
- while (r.pos < c2)
- m.blocksizes.push(r.uint64());
- } else
- m.blocksizes.push(r.uint64());
- break;
- case 5:
- m.hashType = r.uint64();
- break;
- case 6:
- m.fanout = r.uint64();
- break;
- case 7:
- m.mode = r.uint32();
- break;
- case 8:
- m.mtime = $root.UnixTime.decode(r, r.uint32());
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- if (!m.hasOwnProperty("Type"))
- throw $util.ProtocolError("missing required 'Type'", { instance: m });
- return m;
- };
-
- /**
- * Creates a Data message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof Data
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {Data} Data
- */
- Data.fromObject = function fromObject(d) {
- if (d instanceof $root.Data)
- return d;
- var m = new $root.Data();
- switch (d.Type) {
- case "Raw":
- case 0:
- m.Type = 0;
- break;
- case "Directory":
- case 1:
- m.Type = 1;
- break;
- case "File":
- case 2:
- m.Type = 2;
- break;
- case "Metadata":
- case 3:
- m.Type = 3;
- break;
- case "Symlink":
- case 4:
- m.Type = 4;
- break;
- case "HAMTShard":
- case 5:
- m.Type = 5;
- break;
- }
- if (d.Data != null) {
- if (typeof d.Data === "string")
- $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0);
- else if (d.Data.length)
- m.Data = d.Data;
- }
- if (d.filesize != null) {
- if ($util.Long)
- (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true;
- else if (typeof d.filesize === "string")
- m.filesize = parseInt(d.filesize, 10);
- else if (typeof d.filesize === "number")
- m.filesize = d.filesize;
- else if (typeof d.filesize === "object")
- m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true);
- }
- if (d.blocksizes) {
- if (!Array.isArray(d.blocksizes))
- throw TypeError(".Data.blocksizes: array expected");
- m.blocksizes = [];
- for (var i = 0; i < d.blocksizes.length; ++i) {
- if ($util.Long)
- (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true;
- else if (typeof d.blocksizes[i] === "string")
- m.blocksizes[i] = parseInt(d.blocksizes[i], 10);
- else if (typeof d.blocksizes[i] === "number")
- m.blocksizes[i] = d.blocksizes[i];
- else if (typeof d.blocksizes[i] === "object")
- m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true);
- }
- }
- if (d.hashType != null) {
- if ($util.Long)
- (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true;
- else if (typeof d.hashType === "string")
- m.hashType = parseInt(d.hashType, 10);
- else if (typeof d.hashType === "number")
- m.hashType = d.hashType;
- else if (typeof d.hashType === "object")
- m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true);
- }
- if (d.fanout != null) {
- if ($util.Long)
- (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true;
- else if (typeof d.fanout === "string")
- m.fanout = parseInt(d.fanout, 10);
- else if (typeof d.fanout === "number")
- m.fanout = d.fanout;
- else if (typeof d.fanout === "object")
- m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true);
- }
- if (d.mode != null) {
- m.mode = d.mode >>> 0;
- }
- if (d.mtime != null) {
- if (typeof d.mtime !== "object")
- throw TypeError(".Data.mtime: object expected");
- m.mtime = $root.UnixTime.fromObject(d.mtime);
- }
- return m;
- };
-
- /**
- * Creates a plain object from a Data message. Also converts values to other types if specified.
- * @function toObject
- * @memberof Data
- * @static
- * @param {Data} m Data
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- Data.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.arrays || o.defaults) {
- d.blocksizes = [];
- }
- if (o.defaults) {
- d.Type = o.enums === String ? "Raw" : 0;
- if (o.bytes === String)
- d.Data = "";
- else {
- d.Data = [];
- if (o.bytes !== Array)
- d.Data = $util.newBuffer(d.Data);
- }
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.filesize = o.longs === String ? "0" : 0;
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.hashType = o.longs === String ? "0" : 0;
- if ($util.Long) {
- var n = new $util.Long(0, 0, true);
- d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.fanout = o.longs === String ? "0" : 0;
- d.mode = 0;
- d.mtime = null;
- }
- if (m.Type != null && m.hasOwnProperty("Type")) {
- d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type;
- }
- if (m.Data != null && m.hasOwnProperty("Data")) {
- d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data;
- }
- if (m.filesize != null && m.hasOwnProperty("filesize")) {
- if (typeof m.filesize === "number")
- d.filesize = o.longs === String ? String(m.filesize) : m.filesize;
- else
- d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize;
- }
- if (m.blocksizes && m.blocksizes.length) {
- d.blocksizes = [];
- for (var j = 0; j < m.blocksizes.length; ++j) {
- if (typeof m.blocksizes[j] === "number")
- d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j];
- else
- d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j];
- }
- }
- if (m.hashType != null && m.hasOwnProperty("hashType")) {
- if (typeof m.hashType === "number")
- d.hashType = o.longs === String ? String(m.hashType) : m.hashType;
- else
- d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType;
- }
- if (m.fanout != null && m.hasOwnProperty("fanout")) {
- if (typeof m.fanout === "number")
- d.fanout = o.longs === String ? String(m.fanout) : m.fanout;
- else
- d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout;
- }
- if (m.mode != null && m.hasOwnProperty("mode")) {
- d.mode = m.mode;
- }
- if (m.mtime != null && m.hasOwnProperty("mtime")) {
- d.mtime = $root.UnixTime.toObject(m.mtime, o);
- }
- return d;
- };
-
- /**
- * Converts this Data to JSON.
- * @function toJSON
- * @memberof Data
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- Data.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
-
- /**
- * DataType enum.
- * @name Data.DataType
- * @enum {number}
- * @property {number} Raw=0 Raw value
- * @property {number} Directory=1 Directory value
- * @property {number} File=2 File value
- * @property {number} Metadata=3 Metadata value
- * @property {number} Symlink=4 Symlink value
- * @property {number} HAMTShard=5 HAMTShard value
- */
- Data.DataType = (function() {
- var valuesById = {}, values = Object.create(valuesById);
- values[valuesById[0] = "Raw"] = 0;
- values[valuesById[1] = "Directory"] = 1;
- values[valuesById[2] = "File"] = 2;
- values[valuesById[3] = "Metadata"] = 3;
- values[valuesById[4] = "Symlink"] = 4;
- values[valuesById[5] = "HAMTShard"] = 5;
- return values;
- })();
-
- return Data;
+ if (!m.hasOwnProperty('Type'))
+ throw $util.ProtocolError('missing required \'Type\'', { instance: m });
+ return m;
+ };
+ Data.fromObject = function fromObject(d) {
+ if (d instanceof $root.Data)
+ return d;
+ var m = new $root.Data();
+ switch (d.Type) {
+ case 'Raw':
+ case 0:
+ m.Type = 0;
+ break;
+ case 'Directory':
+ case 1:
+ m.Type = 1;
+ break;
+ case 'File':
+ case 2:
+ m.Type = 2;
+ break;
+ case 'Metadata':
+ case 3:
+ m.Type = 3;
+ break;
+ case 'Symlink':
+ case 4:
+ m.Type = 4;
+ break;
+ case 'HAMTShard':
+ case 5:
+ m.Type = 5;
+ break;
+ }
+ if (d.Data != null) {
+ if (typeof d.Data === 'string')
+ $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0);
+ else if (d.Data.length)
+ m.Data = d.Data;
+ }
+ if (d.filesize != null) {
+ if ($util.Long)
+ (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true;
+ else if (typeof d.filesize === 'string')
+ m.filesize = parseInt(d.filesize, 10);
+ else if (typeof d.filesize === 'number')
+ m.filesize = d.filesize;
+ else if (typeof d.filesize === 'object')
+ m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true);
+ }
+ if (d.blocksizes) {
+ if (!Array.isArray(d.blocksizes))
+ throw TypeError('.Data.blocksizes: array expected');
+ m.blocksizes = [];
+ for (var i = 0; i < d.blocksizes.length; ++i) {
+ if ($util.Long)
+ (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true;
+ else if (typeof d.blocksizes[i] === 'string')
+ m.blocksizes[i] = parseInt(d.blocksizes[i], 10);
+ else if (typeof d.blocksizes[i] === 'number')
+ m.blocksizes[i] = d.blocksizes[i];
+ else if (typeof d.blocksizes[i] === 'object')
+ m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true);
+ }
+ }
+ if (d.hashType != null) {
+ if ($util.Long)
+ (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true;
+ else if (typeof d.hashType === 'string')
+ m.hashType = parseInt(d.hashType, 10);
+ else if (typeof d.hashType === 'number')
+ m.hashType = d.hashType;
+ else if (typeof d.hashType === 'object')
+ m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true);
+ }
+ if (d.fanout != null) {
+ if ($util.Long)
+ (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true;
+ else if (typeof d.fanout === 'string')
+ m.fanout = parseInt(d.fanout, 10);
+ else if (typeof d.fanout === 'number')
+ m.fanout = d.fanout;
+ else if (typeof d.fanout === 'object')
+ m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true);
+ }
+ if (d.mode != null) {
+ m.mode = d.mode >>> 0;
+ }
+ if (d.mtime != null) {
+ if (typeof d.mtime !== 'object')
+ throw TypeError('.Data.mtime: object expected');
+ m.mtime = $root.UnixTime.fromObject(d.mtime);
+ }
+ return m;
+ };
+ Data.toObject = function toObject(m, o) {
+ if (!o)
+ o = {};
+ var d = {};
+ if (o.arrays || o.defaults) {
+ d.blocksizes = [];
+ }
+ if (o.defaults) {
+ d.Type = o.enums === String ? 'Raw' : 0;
+ if (o.bytes === String)
+ d.Data = '';
+ else {
+ d.Data = [];
+ if (o.bytes !== Array)
+ d.Data = $util.newBuffer(d.Data);
+ }
+ if ($util.Long) {
+ var n = new $util.Long(0, 0, true);
+ d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
+ } else
+ d.filesize = o.longs === String ? '0' : 0;
+ if ($util.Long) {
+ var n = new $util.Long(0, 0, true);
+ d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
+ } else
+ d.hashType = o.longs === String ? '0' : 0;
+ if ($util.Long) {
+ var n = new $util.Long(0, 0, true);
+ d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
+ } else
+ d.fanout = o.longs === String ? '0' : 0;
+ d.mode = 0;
+ d.mtime = null;
+ }
+ if (m.Type != null && m.hasOwnProperty('Type')) {
+ d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type;
+ }
+ if (m.Data != null && m.hasOwnProperty('Data')) {
+ d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data;
+ }
+ if (m.filesize != null && m.hasOwnProperty('filesize')) {
+ if (typeof m.filesize === 'number')
+ d.filesize = o.longs === String ? String(m.filesize) : m.filesize;
+ else
+ d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize;
+ }
+ if (m.blocksizes && m.blocksizes.length) {
+ d.blocksizes = [];
+ for (var j = 0; j < m.blocksizes.length; ++j) {
+ if (typeof m.blocksizes[j] === 'number')
+ d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j];
+ else
+ d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j];
+ }
+ }
+ if (m.hashType != null && m.hasOwnProperty('hashType')) {
+ if (typeof m.hashType === 'number')
+ d.hashType = o.longs === String ? String(m.hashType) : m.hashType;
+ else
+ d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType;
+ }
+ if (m.fanout != null && m.hasOwnProperty('fanout')) {
+ if (typeof m.fanout === 'number')
+ d.fanout = o.longs === String ? String(m.fanout) : m.fanout;
+ else
+ d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout;
+ }
+ if (m.mode != null && m.hasOwnProperty('mode')) {
+ d.mode = m.mode;
+ }
+ if (m.mtime != null && m.hasOwnProperty('mtime')) {
+ d.mtime = $root.UnixTime.toObject(m.mtime, o);
+ }
+ return d;
+ };
+ Data.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf__default['default'].util.toJSONOptions);
+ };
+ Data.DataType = function () {
+ const valuesById = {}, values = Object.create(valuesById);
+ values[valuesById[0] = 'Raw'] = 0;
+ values[valuesById[1] = 'Directory'] = 1;
+ values[valuesById[2] = 'File'] = 2;
+ values[valuesById[3] = 'Metadata'] = 3;
+ values[valuesById[4] = 'Symlink'] = 4;
+ values[valuesById[5] = 'HAMTShard'] = 5;
+ return values;
+ }();
+ return Data;
})();
-
-$root.UnixTime = (function() {
-
- /**
- * Properties of an UnixTime.
- * @exports IUnixTime
- * @interface IUnixTime
- * @property {number} Seconds UnixTime Seconds
- * @property {number|null} [FractionalNanoseconds] UnixTime FractionalNanoseconds
- */
-
- /**
- * Constructs a new UnixTime.
- * @exports UnixTime
- * @classdesc Represents an UnixTime.
- * @implements IUnixTime
- * @constructor
- * @param {IUnixTime=} [p] Properties to set
- */
- function UnixTime(p) {
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
+const UnixTime = $root.UnixTime = (() => {
+ function UnixTime(p) {
+ if (p)
+ for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
+ if (p[ks[i]] != null)
+ this[ks[i]] = p[ks[i]];
+ }
+ UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0, 0, false) : 0;
+ UnixTime.prototype.FractionalNanoseconds = 0;
+ UnixTime.encode = function encode(m, w) {
+ if (!w)
+ w = $Writer.create();
+ w.uint32(8).int64(m.Seconds);
+ if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, 'FractionalNanoseconds'))
+ w.uint32(21).fixed32(m.FractionalNanoseconds);
+ return w;
+ };
+ UnixTime.decode = function decode(r, l) {
+ if (!(r instanceof $Reader))
+ r = $Reader.create(r);
+ var c = l === undefined ? r.len : r.pos + l, m = new $root.UnixTime();
+ while (r.pos < c) {
+ var t = r.uint32();
+ switch (t >>> 3) {
+ case 1:
+ m.Seconds = r.int64();
+ break;
+ case 2:
+ m.FractionalNanoseconds = r.fixed32();
+ break;
+ default:
+ r.skipType(t & 7);
+ break;
+ }
}
-
- /**
- * UnixTime Seconds.
- * @member {number} Seconds
- * @memberof UnixTime
- * @instance
- */
- UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0;
-
- /**
- * UnixTime FractionalNanoseconds.
- * @member {number} FractionalNanoseconds
- * @memberof UnixTime
- * @instance
- */
- UnixTime.prototype.FractionalNanoseconds = 0;
-
- /**
- * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages.
- * @function encode
- * @memberof UnixTime
- * @static
- * @param {IUnixTime} m UnixTime message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- UnixTime.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- w.uint32(8).int64(m.Seconds);
- if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, "FractionalNanoseconds"))
- w.uint32(21).fixed32(m.FractionalNanoseconds);
- return w;
- };
-
- /**
- * Decodes an UnixTime message from the specified reader or buffer.
- * @function decode
- * @memberof UnixTime
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {UnixTime} UnixTime
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- UnixTime.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.UnixTime();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.Seconds = r.int64();
- break;
- case 2:
- m.FractionalNanoseconds = r.fixed32();
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- if (!m.hasOwnProperty("Seconds"))
- throw $util.ProtocolError("missing required 'Seconds'", { instance: m });
- return m;
- };
-
- /**
- * Creates an UnixTime message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof UnixTime
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {UnixTime} UnixTime
- */
- UnixTime.fromObject = function fromObject(d) {
- if (d instanceof $root.UnixTime)
- return d;
- var m = new $root.UnixTime();
- if (d.Seconds != null) {
- if ($util.Long)
- (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false;
- else if (typeof d.Seconds === "string")
- m.Seconds = parseInt(d.Seconds, 10);
- else if (typeof d.Seconds === "number")
- m.Seconds = d.Seconds;
- else if (typeof d.Seconds === "object")
- m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber();
- }
- if (d.FractionalNanoseconds != null) {
- m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0;
- }
- return m;
- };
-
- /**
- * Creates a plain object from an UnixTime message. Also converts values to other types if specified.
- * @function toObject
- * @memberof UnixTime
- * @static
- * @param {UnixTime} m UnixTime
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- UnixTime.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.defaults) {
- if ($util.Long) {
- var n = new $util.Long(0, 0, false);
- d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
- } else
- d.Seconds = o.longs === String ? "0" : 0;
- d.FractionalNanoseconds = 0;
- }
- if (m.Seconds != null && m.hasOwnProperty("Seconds")) {
- if (typeof m.Seconds === "number")
- d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds;
- else
- d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds;
- }
- if (m.FractionalNanoseconds != null && m.hasOwnProperty("FractionalNanoseconds")) {
- d.FractionalNanoseconds = m.FractionalNanoseconds;
- }
- return d;
- };
-
- /**
- * Converts this UnixTime to JSON.
- * @function toJSON
- * @memberof UnixTime
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- UnixTime.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
-
- return UnixTime;
+ if (!m.hasOwnProperty('Seconds'))
+ throw $util.ProtocolError('missing required \'Seconds\'', { instance: m });
+ return m;
+ };
+ UnixTime.fromObject = function fromObject(d) {
+ if (d instanceof $root.UnixTime)
+ return d;
+ var m = new $root.UnixTime();
+ if (d.Seconds != null) {
+ if ($util.Long)
+ (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false;
+ else if (typeof d.Seconds === 'string')
+ m.Seconds = parseInt(d.Seconds, 10);
+ else if (typeof d.Seconds === 'number')
+ m.Seconds = d.Seconds;
+ else if (typeof d.Seconds === 'object')
+ m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber();
+ }
+ if (d.FractionalNanoseconds != null) {
+ m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0;
+ }
+ return m;
+ };
+ UnixTime.toObject = function toObject(m, o) {
+ if (!o)
+ o = {};
+ var d = {};
+ if (o.defaults) {
+ if ($util.Long) {
+ var n = new $util.Long(0, 0, false);
+ d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
+ } else
+ d.Seconds = o.longs === String ? '0' : 0;
+ d.FractionalNanoseconds = 0;
+ }
+ if (m.Seconds != null && m.hasOwnProperty('Seconds')) {
+ if (typeof m.Seconds === 'number')
+ d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds;
+ else
+ d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds;
+ }
+ if (m.FractionalNanoseconds != null && m.hasOwnProperty('FractionalNanoseconds')) {
+ d.FractionalNanoseconds = m.FractionalNanoseconds;
+ }
+ return d;
+ };
+ UnixTime.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf__default['default'].util.toJSONOptions);
+ };
+ return UnixTime;
})();
-
-$root.Metadata = (function() {
-
- /**
- * Properties of a Metadata.
- * @exports IMetadata
- * @interface IMetadata
- * @property {string|null} [MimeType] Metadata MimeType
- */
-
- /**
- * Constructs a new Metadata.
- * @exports Metadata
- * @classdesc Represents a Metadata.
- * @implements IMetadata
- * @constructor
- * @param {IMetadata=} [p] Properties to set
- */
- function Metadata(p) {
- if (p)
- for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
- if (p[ks[i]] != null)
- this[ks[i]] = p[ks[i]];
+const Metadata = $root.Metadata = (() => {
+ function Metadata(p) {
+ if (p)
+ for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
+ if (p[ks[i]] != null)
+ this[ks[i]] = p[ks[i]];
+ }
+ Metadata.prototype.MimeType = '';
+ Metadata.encode = function encode(m, w) {
+ if (!w)
+ w = $Writer.create();
+ if (m.MimeType != null && Object.hasOwnProperty.call(m, 'MimeType'))
+ w.uint32(10).string(m.MimeType);
+ return w;
+ };
+ Metadata.decode = function decode(r, l) {
+ if (!(r instanceof $Reader))
+ r = $Reader.create(r);
+ var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();
+ while (r.pos < c) {
+ var t = r.uint32();
+ switch (t >>> 3) {
+ case 1:
+ m.MimeType = r.string();
+ break;
+ default:
+ r.skipType(t & 7);
+ break;
+ }
}
-
- /**
- * Metadata MimeType.
- * @member {string} MimeType
- * @memberof Metadata
- * @instance
- */
- Metadata.prototype.MimeType = "";
-
- /**
- * Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages.
- * @function encode
- * @memberof Metadata
- * @static
- * @param {IMetadata} m Metadata message or plain object to encode
- * @param {$protobuf.Writer} [w] Writer to encode to
- * @returns {$protobuf.Writer} Writer
- */
- Metadata.encode = function encode(m, w) {
- if (!w)
- w = $Writer.create();
- if (m.MimeType != null && Object.hasOwnProperty.call(m, "MimeType"))
- w.uint32(10).string(m.MimeType);
- return w;
- };
-
- /**
- * Decodes a Metadata message from the specified reader or buffer.
- * @function decode
- * @memberof Metadata
- * @static
- * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
- * @param {number} [l] Message length if known beforehand
- * @returns {Metadata} Metadata
- * @throws {Error} If the payload is not a reader or valid buffer
- * @throws {$protobuf.util.ProtocolError} If required fields are missing
- */
- Metadata.decode = function decode(r, l) {
- if (!(r instanceof $Reader))
- r = $Reader.create(r);
- var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();
- while (r.pos < c) {
- var t = r.uint32();
- switch (t >>> 3) {
- case 1:
- m.MimeType = r.string();
- break;
- default:
- r.skipType(t & 7);
- break;
- }
- }
- return m;
- };
-
- /**
- * Creates a Metadata message from a plain object. Also converts values to their respective internal types.
- * @function fromObject
- * @memberof Metadata
- * @static
- * @param {Object.<string,*>} d Plain object
- * @returns {Metadata} Metadata
- */
- Metadata.fromObject = function fromObject(d) {
- if (d instanceof $root.Metadata)
- return d;
- var m = new $root.Metadata();
- if (d.MimeType != null) {
- m.MimeType = String(d.MimeType);
- }
- return m;
- };
-
- /**
- * Creates a plain object from a Metadata message. Also converts values to other types if specified.
- * @function toObject
- * @memberof Metadata
- * @static
- * @param {Metadata} m Metadata
- * @param {$protobuf.IConversionOptions} [o] Conversion options
- * @returns {Object.<string,*>} Plain object
- */
- Metadata.toObject = function toObject(m, o) {
- if (!o)
- o = {};
- var d = {};
- if (o.defaults) {
- d.MimeType = "";
- }
- if (m.MimeType != null && m.hasOwnProperty("MimeType")) {
- d.MimeType = m.MimeType;
- }
- return d;
- };
-
- /**
- * Converts this Metadata to JSON.
- * @function toJSON
- * @memberof Metadata
- * @instance
- * @returns {Object.<string,*>} JSON object
- */
- Metadata.prototype.toJSON = function toJSON() {
- return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
- };
-
- return Metadata;
+ return m;
+ };
+ Metadata.fromObject = function fromObject(d) {
+ if (d instanceof $root.Metadata)
+ return d;
+ var m = new $root.Metadata();
+ if (d.MimeType != null) {
+ m.MimeType = String(d.MimeType);
+ }
+ return m;
+ };
+ Metadata.toObject = function toObject(m, o) {
+ if (!o)
+ o = {};
+ var d = {};
+ if (o.defaults) {
+ d.MimeType = '';
+ }
+ if (m.MimeType != null && m.hasOwnProperty('MimeType')) {
+ d.MimeType = m.MimeType;
+ }
+ return d;
+ };
+ Metadata.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf__default['default'].util.toJSONOptions);
+ };
+ return Metadata;
})();
-module.exports = $root;
+exports.Data = Data;
+exports.Metadata = Metadata;
+exports.UnixTime = UnixTime;
+exports.default = $root;
/***/ }),
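// Example (sketch): the generated codec above round-trips a message; 'Type'
// is the only required field. 'Data' refers to the const exported by the
// module just above.
//
//   const w = Data.encode({ Type: Data.DataType.File })  // returns a protobuf Writer
//   const m = Data.decode(w.finish())                    // => Data instance
//   m.Type === Data.DataType.File                        // => true
//   Data.toObject(m, { longs: Number, arrays: true })    // plain-object view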
@@ -15931,11 +13572,7 @@ class Decoder {
}
}
or(decoder) {
- const decoders = {
- [this.prefix]: this,
- ...decoder.decoders || { [decoder.prefix]: decoder }
- };
- return new ComposedDecoder(decoders);
+ return or(this, decoder);
}
}
class ComposedDecoder {
@@ -15943,11 +13580,7 @@ class ComposedDecoder {
this.decoders = decoders;
}
or(decoder) {
- const other = decoder.decoders || { [decoder.prefix]: decoder };
- return new ComposedDecoder({
- ...this.decoders,
- ...other
- });
+ return or(this, decoder);
}
decode(input) {
const prefix = input[0];
@@ -15959,6 +13592,10 @@ class ComposedDecoder {
}
}
}
+const or = (left, right) => new ComposedDecoder({
+ ...left.decoders || { [left.prefix]: left },
+ ...right.decoders || { [right.prefix]: right }
+});
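// Note (sketch): `or` merges the prefix->decoder tables of both sides, whether
// each is a single Decoder or an already-composed one, so `a.or(b).or(c)`
// flattens into one ComposedDecoder keyed on the multibase prefix character.
// Assuming the multiformats base codecs:
//
//   const { base32 } = require('multiformats/bases/base32')
//   const { base58btc } = require('multiformats/bases/base58')
//   const either = base32.decoder.or(base58btc.decoder)
//   either.decode('b...')   // 'b' prefix dispatches to base32
//   either.decode('z...')   // 'z' prefix dispatches to base58btc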
class Codec {
constructor(name, prefix, baseEncode, baseDecode) {
this.name = name;
@@ -16055,6 +13692,7 @@ const rfc4648 = ({name, prefix, bitsPerChar, alphabet}) => {
exports.Codec = Codec;
exports.baseX = baseX;
exports.from = from;
+exports.or = or;
exports.rfc4648 = rfc4648;
@@ -16993,12 +14631,12 @@ exports.CID = CID;
Object.defineProperty(exports, "__esModule", ({ value: true }));
-const {name, code, encode, decode} = {
- name: 'json',
- code: 512,
- encode: json => new TextEncoder().encode(JSON.stringify(json)),
- decode: bytes => JSON.parse(new TextDecoder().decode(bytes))
-};
+const textEncoder = new TextEncoder();
+const textDecoder = new TextDecoder();
+const name = 'json';
+const code = 512;
+const encode = node => textEncoder.encode(JSON.stringify(node));
+const decode = data => JSON.parse(textDecoder.decode(data));
exports.code = code;
exports.decode = decode;
@@ -17018,13 +14656,10 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
var bytes = __nccwpck_require__(6507);
-const raw = bytes$1 => bytes.coerce(bytes$1);
-const {name, code, encode, decode} = {
- name: 'raw',
- code: 85,
- decode: raw,
- encode: raw
-};
+const name = 'raw';
+const code = 85;
+const encode = node => bytes.coerce(node);
+const decode = data => bytes.coerce(data);
exports.code = code;
exports.decode = decode;
@@ -17163,12 +14798,12 @@ var crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
const sha256 = hasher.from({
name: 'sha2-256',
code: 18,
- encode: input => bytes.coerce(crypto__default['default'].createHash('sha256').update(input).digest())
+ encode: input => bytes.coerce(crypto__default["default"].createHash('sha256').update(input).digest())
});
const sha512 = hasher.from({
name: 'sha2-512',
code: 19,
- encode: input => bytes.coerce(crypto__default['default'].createHash('sha512').update(input).digest())
+ encode: input => bytes.coerce(crypto__default["default"].createHash('sha512').update(input).digest())
});
exports.sha256 = sha256;
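// Example (sketch, assuming the published multiformats entry point): the
// hashers above implement the multiformats Hasher interface, so `digest`
// resolves to a multihash whose code matches the declaration (18 = sha2-256).
//
//   const { sha256 } = require('multiformats/hashes/sha2')
//   // inside an async function:
//   const mh = await sha256.digest(new TextEncoder().encode('hello'))
//   mh.code     // => 18
//   mh.digest   // => 32-byte Uint8Array with the raw SHA-256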
@@ -17436,8 +15071,9 @@ var varint = {
encodingLength: length
};
var _brrp_varint = varint;
+var varint$1 = _brrp_varint;
-module.exports = _brrp_varint;
+module.exports = varint$1;
/***/ }),
@@ -18108,6 +15744,86 @@ module.exports.default = pRetry;
module.exports.AbortError = AbortError;
+/***/ }),
+
+/***/ 1940:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+var qs = __nccwpck_require__(1191)
+ , url = __nccwpck_require__(8835)
+ , xtend = __nccwpck_require__(1208);
+
+const PARSE_LINK_HEADER_MAXLEN = parseInt(process.env.PARSE_LINK_HEADER_MAXLEN) || 2000;
+const PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED = process.env.PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED != null
+
+function hasRel(x) {
+ return x && x.rel;
+}
+
+function intoRels (acc, x) {
+ function splitRel (rel) {
+ acc[rel] = xtend(x, { rel: rel });
+ }
+
+ x.rel.split(/\s+/).forEach(splitRel);
+
+ return acc;
+}
+
+function createObjects (acc, p) {
+ // rel="next" => 1: rel 2: next
+ var m = p.match(/\s*(.+)\s*=\s*"?([^"]+)"?/)
+ if (m) acc[m[1]] = m[2];
+ return acc;
+}
+
+function parseLink(link) {
+ try {
+    var m = link.match(/<?([^>]*)>(.*)/)
+ , linkUrl = m[1]
+ , parts = m[2].split(';')
+ , parsedUrl = url.parse(linkUrl)
+ , qry = qs.parse(parsedUrl.query);
+
+ parts.shift();
+
+ var info = parts
+ .reduce(createObjects, {});
+
+ info = xtend(qry, info);
+ info.url = linkUrl;
+ return info;
+ } catch (e) {
+ return null;
+ }
+}
+
+function checkHeader(linkHeader){
+ if (!linkHeader) return false;
+
+ if (linkHeader.length > PARSE_LINK_HEADER_MAXLEN) {
+ if (PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED) {
+ throw new Error('Input string too long, it should be under ' + PARSE_LINK_HEADER_MAXLEN + ' characters.');
+ } else {
+ return false;
+ }
+ }
+ return true;
+}
+
+module.exports = function (linkHeader) {
+ if (!checkHeader(linkHeader)) return null;
+
+  return linkHeader.split(/,\s*</)
+ .map(parseLink)
+ .filter(hasRel)
+ .reduce(intoRels, {});
+};
+
+
/***/ }),
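// Example (sketch): what the vendored parse-link-header module above returns
// for a typical pagination header. The URLs are illustrative only.
//
//   const parsed = parseLinkHeader(
//     '<https://api.example.com/items?page=2>; rel="next", ' +
//     '<https://api.example.com/items?page=5>; rel="last"'
//   )
//   parsed.next  // => { page: '2', rel: 'next', url: 'https://api.example.com/items?page=2' }
//   parsed.last  // => { page: '5', rel: 'last', url: 'https://api.example.com/items?page=5' }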
/***/ 6916:
@@ -21555,325 +19271,415 @@ function valueOnly (elem) {
/***/ }),
-/***/ 6399:
-/***/ ((module) => {
-
-"use strict";
-
-
-/**
- * Can be used with Array.sort to sort and array with Uint8Array entries
- *
- * @param {Uint8Array} a
- * @param {Uint8Array} b
- */
-function compare (a, b) {
- for (let i = 0; i < a.byteLength; i++) {
- if (a[i] < b[i]) {
- return -1
- }
-
- if (a[i] > b[i]) {
- return 1
- }
- }
-
- if (a.byteLength > b.byteLength) {
- return 1
- }
-
- if (a.byteLength < b.byteLength) {
- return -1
- }
-
- return 0
-}
+/***/ 4294:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-module.exports = compare
+module.exports = __nccwpck_require__(4219);
/***/ }),
-/***/ 7952:
-/***/ ((module) => {
+/***/ 4219:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-/**
- * Returns a new Uint8Array created by concatenating the passed ArrayLikes
- *
- * @param {Array<ArrayLike<number>>} arrays
- * @param {number} [length]
- */
-function concat (arrays, length) {
- if (!length) {
- length = arrays.reduce((acc, curr) => acc + curr.length, 0)
- }
+var net = __nccwpck_require__(1631);
+var tls = __nccwpck_require__(4016);
+var http = __nccwpck_require__(8605);
+var https = __nccwpck_require__(7211);
+var events = __nccwpck_require__(8614);
+var assert = __nccwpck_require__(2357);
+var util = __nccwpck_require__(1669);
- const output = new Uint8Array(length)
- let offset = 0
- for (const arr of arrays) {
- output.set(arr, offset)
- offset += arr.length
- }
+exports.httpOverHttp = httpOverHttp;
+exports.httpsOverHttp = httpsOverHttp;
+exports.httpOverHttps = httpOverHttps;
+exports.httpsOverHttps = httpsOverHttps;
- return output
+
+function httpOverHttp(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = http.request;
+ return agent;
}
-module.exports = concat
+function httpsOverHttp(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = http.request;
+ agent.createSocket = createSecureSocket;
+ agent.defaultPort = 443;
+ return agent;
+}
+function httpOverHttps(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = https.request;
+ return agent;
+}
-/***/ }),
+function httpsOverHttps(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = https.request;
+ agent.createSocket = createSecureSocket;
+ agent.defaultPort = 443;
+ return agent;
+}
-/***/ 333:
-/***/ ((module) => {
-"use strict";
+function TunnelingAgent(options) {
+ var self = this;
+ self.options = options || {};
+ self.proxyOptions = self.options.proxy || {};
+ self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
+ self.requests = [];
+ self.sockets = [];
+
+ self.on('free', function onFree(socket, host, port, localAddress) {
+ var options = toOptions(host, port, localAddress);
+ for (var i = 0, len = self.requests.length; i < len; ++i) {
+ var pending = self.requests[i];
+ if (pending.host === options.host && pending.port === options.port) {
+        // This queued request targets the same origin server,
+        // so reuse the freed connection for it.
+ self.requests.splice(i, 1);
+ pending.request.onSocket(socket);
+ return;
+ }
+ }
+ socket.destroy();
+ self.removeSocket(socket);
+ });
+}
+util.inherits(TunnelingAgent, events.EventEmitter);
+TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
+ var self = this;
+ var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
-/**
- * Returns true if the two passed Uint8Arrays have the same content
- *
- * @param {Uint8Array} a
- * @param {Uint8Array} b
- */
-function equals (a, b) {
- if (a === b) {
- return true
+ if (self.sockets.length >= this.maxSockets) {
+ // We are over limit so we'll add it to the queue.
+ self.requests.push(options);
+ return;
}
- if (a.byteLength !== b.byteLength) {
- return false
- }
+ // If we are under maxSockets create a new one.
+ self.createSocket(options, function(socket) {
+ socket.on('free', onFree);
+ socket.on('close', onCloseOrRemove);
+ socket.on('agentRemove', onCloseOrRemove);
+ req.onSocket(socket);
- for (let i = 0; i < a.byteLength; i++) {
- if (a[i] !== b[i]) {
- return false
+ function onFree() {
+ self.emit('free', socket, options);
}
- }
- return true
-}
+ function onCloseOrRemove(err) {
+ self.removeSocket(socket);
+ socket.removeListener('free', onFree);
+ socket.removeListener('close', onCloseOrRemove);
+ socket.removeListener('agentRemove', onCloseOrRemove);
+ }
+ });
+};
-module.exports = equals
+TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
+ var self = this;
+ var placeholder = {};
+ self.sockets.push(placeholder);
+ var connectOptions = mergeOptions({}, self.proxyOptions, {
+ method: 'CONNECT',
+ path: options.host + ':' + options.port,
+ agent: false,
+ headers: {
+ host: options.host + ':' + options.port
+ }
+ });
+ if (options.localAddress) {
+ connectOptions.localAddress = options.localAddress;
+ }
+ if (connectOptions.proxyAuth) {
+ connectOptions.headers = connectOptions.headers || {};
+ connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
+ new Buffer(connectOptions.proxyAuth).toString('base64');
+ }
-/***/ }),
+ debug('making CONNECT request');
+ var connectReq = self.request(connectOptions);
+ connectReq.useChunkedEncodingByDefault = false; // for v0.6
+ connectReq.once('response', onResponse); // for v0.6
+ connectReq.once('upgrade', onUpgrade); // for v0.6
+ connectReq.once('connect', onConnect); // for v0.7 or later
+ connectReq.once('error', onError);
+ connectReq.end();
-/***/ 828:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+ function onResponse(res) {
+ // Very hacky. This is necessary to avoid http-parser leaks.
+ res.upgrade = true;
+ }
-"use strict";
+ function onUpgrade(res, socket, head) {
+ // Hacky.
+ process.nextTick(function() {
+ onConnect(res, socket, head);
+ });
+ }
+ function onConnect(res, socket, head) {
+ connectReq.removeAllListeners();
+ socket.removeAllListeners();
-const bases = __nccwpck_require__(6043)
+ if (res.statusCode !== 200) {
+ debug('tunneling socket could not be established, statusCode=%d',
+ res.statusCode);
+ socket.destroy();
+ var error = new Error('tunneling socket could not be established, ' +
+ 'statusCode=' + res.statusCode);
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ return;
+ }
+ if (head.length > 0) {
+ debug('got illegal response body from proxy');
+ socket.destroy();
+ var error = new Error('got illegal response body from proxy');
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ return;
+ }
+ debug('tunneling connection has established');
+ self.sockets[self.sockets.indexOf(placeholder)] = socket;
+ return cb(socket);
+ }
-/**
- * @typedef {import('./util/bases').SupportedEncodings} SupportedEncodings
- */
+ function onError(cause) {
+ connectReq.removeAllListeners();
-/**
- * Create a `Uint8Array` from the passed string
- *
- * Supports `utf8`, `utf-8`, `hex`, and any encoding supported by the multiformats module.
- *
- * Also `ascii` which is similar to node's 'binary' encoding.
- *
- * @param {string} string
- * @param {SupportedEncodings} [encoding=utf8] - utf8, base16, base64, base64urlpad, etc
- * @returns {Uint8Array}
- */
-function fromString (string, encoding = 'utf8') {
- const base = bases[encoding]
+ debug('tunneling socket could not be established, cause=%s\n',
+ cause.message, cause.stack);
+ var error = new Error('tunneling socket could not be established, ' +
+ 'cause=' + cause.message);
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ }
+};
- if (!base) {
- throw new Error(`Unsupported encoding "${encoding}"`)
+TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
+ var pos = this.sockets.indexOf(socket)
+ if (pos === -1) {
+ return;
}
+ this.sockets.splice(pos, 1);
- // add multibase prefix
- return base.decoder.decode(`${base.prefix}${string}`)
-}
+ var pending = this.requests.shift();
+ if (pending) {
+ // If we have pending requests and a socket gets closed a new one
+ // needs to be created to take over in the pool for the one that closed.
+ this.createSocket(pending, function(socket) {
+ pending.request.onSocket(socket);
+ });
+ }
+};
-module.exports = fromString
+function createSecureSocket(options, cb) {
+ var self = this;
+ TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
+ var hostHeader = options.request.getHeader('host');
+ var tlsOptions = mergeOptions({}, self.options, {
+ socket: socket,
+ servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
+ });
+ // 0 is dummy port for v0.6
+ var secureSocket = tls.connect(0, tlsOptions);
+ self.sockets[self.sockets.indexOf(socket)] = secureSocket;
+ cb(secureSocket);
+ });
+}
-/***/ }),
-/***/ 5804:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+function toOptions(host, port, localAddress) {
+ if (typeof host === 'string') { // since v0.10
+ return {
+ host: host,
+ port: port,
+ localAddress: localAddress
+ };
+ }
+ return host; // for v0.11 or later
+}
-"use strict";
+function mergeOptions(target) {
+ for (var i = 1, len = arguments.length; i < len; ++i) {
+ var overrides = arguments[i];
+ if (typeof overrides === 'object') {
+ var keys = Object.keys(overrides);
+ for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
+ var k = keys[j];
+ if (overrides[k] !== undefined) {
+ target[k] = overrides[k];
+ }
+ }
+ }
+ }
+ return target;
+}
-const compare = __nccwpck_require__(6399)
-const concat = __nccwpck_require__(7952)
-const equals = __nccwpck_require__(333)
-const fromString = __nccwpck_require__(828)
-const toString = __nccwpck_require__(757)
-const xor = __nccwpck_require__(8281)
-module.exports = {
- compare,
- concat,
- equals,
- fromString,
- toString,
- xor
+var debug;
+if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
+ debug = function() {
+ var args = Array.prototype.slice.call(arguments);
+ if (typeof args[0] === 'string') {
+ args[0] = 'TUNNEL: ' + args[0];
+ } else {
+ args.unshift('TUNNEL:');
+ }
+ console.error.apply(console, args);
+ }
+} else {
+ debug = function() {};
}
+exports.debug = debug; // for test
/***/ }),
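// Example (sketch): the module above is the vendored `tunnel` package. It
// builds http(s) Agents that open a CONNECT tunnel through a forward proxy;
// the proxy host and port below are hypothetical.
//
//   const https = require('https')
//   const tunnel = require('tunnel')
//
//   const agent = tunnel.httpsOverHttp({
//     proxy: { host: 'proxy.example.com', port: 3128 }
//   })
//   https.get({ host: 'example.org', path: '/', agent }, (res) => {
//     console.log(res.statusCode)   // response arrived through the tunnel
//   })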
-/***/ 757:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+/***/ 5114:
+/***/ ((__unused_webpack_module, exports) => {
"use strict";
-const bases = __nccwpck_require__(6043)
-
-/**
- * @typedef {import('./util/bases').SupportedEncodings} SupportedEncodings
- */
-
-/**
- * Turns a `Uint8Array` into a string.
- *
- * Supports `utf8`, `utf-8` and any encoding supported by the multibase module.
- *
- * Also `ascii` which is similar to node's 'binary' encoding.
- *
- * @param {Uint8Array} array - The array to turn into a string
- * @param {SupportedEncodings} [encoding=utf8] - The encoding to use
- * @returns {string}
- */
-function toString (array, encoding = 'utf8') {
- const base = bases[encoding]
+Object.defineProperty(exports, "__esModule", ({ value: true }));
- if (!base) {
- throw new Error(`Unsupported encoding "${encoding}"`)
+function concat(arrays, length) {
+ if (!length) {
+ length = arrays.reduce((acc, curr) => acc + curr.length, 0);
}
-
- // strip multibase prefix
- return base.encoder.encode(array).substring(1)
+ const output = new Uint8Array(length);
+ let offset = 0;
+ for (const arr of arrays) {
+ output.set(arr, offset);
+ offset += arr.length;
+ }
+ return output;
}
-module.exports = toString
+exports.concat = concat;
/***/ }),
-/***/ 6043:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+/***/ 9192:
+/***/ ((__unused_webpack_module, exports) => {
"use strict";
-
-const { bases } = __nccwpck_require__(1046)
-/**
- * @typedef {import('multiformats/bases/interface').MultibaseCodec} MultibaseCodec
- */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
-/**
- * @param {string} name
- * @param {string} prefix
- * @param {(buf: Uint8Array) => string} encode
- * @param {(str: string) => Uint8Array} decode
- * @returns {MultibaseCodec}
- */
-function createCodec (name, prefix, encode, decode) {
- return {
- name,
- prefix,
- encoder: {
- name,
- prefix,
- encode
- },
- decoder: {
- decode
+function equals(a, b) {
+ if (a === b) {
+ return true;
+ }
+ if (a.byteLength !== b.byteLength) {
+ return false;
+ }
+ for (let i = 0; i < a.byteLength; i++) {
+ if (a[i] !== b[i]) {
+ return false;
}
}
+ return true;
}
-const string = createCodec('utf8', 'u', (buf) => {
- const decoder = new TextDecoder('utf8')
- return 'u' + decoder.decode(buf)
-}, (str) => {
- const encoder = new TextEncoder()
- return encoder.encode(str.substring(1))
-})
+exports.equals = equals;
+
-const ascii = createCodec('ascii', 'a', (buf) => {
- let string = 'a'
+/***/ }),
- for (let i = 0; i < buf.length; i++) {
- string += String.fromCharCode(buf[i])
- }
- return string
-}, (str) => {
- str = str.substring(1)
- const buf = new Uint8Array(str.length)
+/***/ 3538:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
- for (let i = 0; i < str.length; i++) {
- buf[i] = str.charCodeAt(i)
- }
+"use strict";
- return buf
-})
-/**
- * @typedef {'utf8' | 'utf-8' | 'hex' | 'latin1' | 'ascii' | 'binary' | keyof bases } SupportedEncodings
- */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
-/**
- * @type {Record}
- */
-const BASES = {
- 'utf8': string,
- 'utf-8': string,
- 'hex': bases.base16,
- 'latin1': ascii,
- 'ascii': ascii,
- 'binary': ascii,
+var bases = __nccwpck_require__(8552);
- ...bases
+function fromString(string, encoding = 'utf8') {
+ const base = bases[encoding];
+ if (!base) {
+ throw new Error(`Unsupported encoding "${ encoding }"`);
+ }
+ return base.decoder.decode(`${ base.prefix }${ string }`);
}
-module.exports = BASES
+exports.fromString = fromString;
/***/ }),
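Module 3538 (`fromString`) decodes by prepending the codec's one-character multibase prefix before calling the decoder, so callers pass bare, un-prefixed strings. A sketch of the round-trip (top-level re-exports assumed, as above):

```js
const { fromString, equals } = require('uint8arrays')

// 'hex' maps to multiformats' base16 codec (prefix 'f'), so '666f6f' is
// decoded as 'f666f6f' internally.
const fromHex = fromString('666f6f', 'hex')
const fromUtf8 = fromString('foo') // default encoding is utf8
console.log(equals(fromHex, fromUtf8)) // true
```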
-/***/ 8281:
-/***/ ((module) => {
+/***/ 8552:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-/**
- * Returns the xor distance between two arrays
- *
- * @param {Uint8Array} a
- * @param {Uint8Array} b
- */
-function xor (a, b) {
- if (a.length !== b.length) {
- throw new Error('Inputs should have the same length')
- }
-
- const result = new Uint8Array(a.length)
-
- for (let i = 0; i < a.length; i++) {
- result[i] = a[i] ^ b[i]
- }
+var basics = __nccwpck_require__(1046);
- return result
+function createCodec(name, prefix, encode, decode) {
+ return {
+ name,
+ prefix,
+ encoder: {
+ name,
+ prefix,
+ encode
+ },
+ decoder: { decode }
+ };
}
+const string = createCodec('utf8', 'u', buf => {
+ const decoder = new TextDecoder('utf8');
+ return 'u' + decoder.decode(buf);
+}, str => {
+ const encoder = new TextEncoder();
+ return encoder.encode(str.substring(1));
+});
+const ascii = createCodec('ascii', 'a', buf => {
+ let string = 'a';
+ for (let i = 0; i < buf.length; i++) {
+ string += String.fromCharCode(buf[i]);
+ }
+ return string;
+}, str => {
+ str = str.substring(1);
+ const buf = new Uint8Array(str.length);
+ for (let i = 0; i < str.length; i++) {
+ buf[i] = str.charCodeAt(i);
+ }
+ return buf;
+});
+const BASES = {
+ utf8: string,
+ 'utf-8': string,
+ hex: basics.bases.base16,
+ latin1: ascii,
+ ascii: ascii,
+ binary: ascii,
+ ...basics.bases
+};
-module.exports = xor
+module.exports = BASES;
/***/ }),
@@ -26017,118 +23823,28 @@ exports.TextDecoder =
/***/ }),
-/***/ 9641:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
+/***/ 1208:
+/***/ ((module) => {
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+module.exports = extend
-var Path = __nccwpck_require__(5622);
-var fs = __nccwpck_require__(5747);
-var glob = __nccwpck_require__(402);
-var errCode = __nccwpck_require__(2997);
+var hasOwnProperty = Object.prototype.hasOwnProperty;
-function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+function extend() {
+ var target = {}
-var Path__default = /*#__PURE__*/_interopDefaultLegacy(Path);
-var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
-var glob__default = /*#__PURE__*/_interopDefaultLegacy(glob);
-var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
+ for (var i = 0; i < arguments.length; i++) {
+ var source = arguments[i]
-async function getFilesFromPath(paths, options) {
- const files = [];
- for await (const file of filesFromPath(paths, options)) {
- files.push(file);
- }
- return files;
-}
-async function* filesFromPath(paths, options) {
- options = options || {};
- if (typeof paths === 'string') {
- paths = [paths];
- }
- const globSourceOptions = {
- recursive: true,
- glob: {
- dot: Boolean(options.hidden),
- ignore: Array.isArray(options.ignore) ? options.ignore : [],
- follow: options.followSymlinks != null ? options.followSymlinks : true
- }
- };
- for await (const path of paths) {
- if (typeof path !== 'string') {
- throw errCode__default['default'](new Error('Path must be a string'), 'ERR_INVALID_PATH', { path });
- }
- const absolutePath = Path__default['default'].resolve(process.cwd(), path);
- const stat = await fs.promises.stat(absolutePath);
- const prefix = Path__default['default'].dirname(absolutePath);
- let mode = options.mode;
- if (options.preserveMode) {
- mode = stat.mode;
- }
- let mtime = options.mtime;
- if (options.preserveMtime) {
- mtime = stat.mtime;
- }
- yield* toGlobSource({
- path,
- type: stat.isDirectory() ? 'dir' : 'file',
- prefix,
- mode,
- mtime,
- size: stat.size,
- preserveMode: options.preserveMode,
- preserveMtime: options.preserveMtime
- }, globSourceOptions);
- }
-}
-async function* toGlobSource({path, type, prefix, mode, mtime, size, preserveMode, preserveMtime}, options) {
- options = options || {};
- const baseName = Path__default['default'].basename(path);
- if (type === 'file') {
- yield {
- name: `/${ baseName.replace(prefix, '') }`,
- stream: () => fs__default['default'].createReadStream(Path__default['default'].isAbsolute(path) ? path : Path__default['default'].join(process.cwd(), path)),
- mode,
- mtime,
- size
- };
- return;
- }
- const globOptions = Object.assign({}, options.glob, {
- cwd: path,
- nodir: false,
- realpath: false,
- absolute: true
- });
- for await (const p of glob__default['default'](path, '**/*', globOptions)) {
- const stat = await fs.promises.stat(p);
- if (!stat.isFile()) {
- continue;
- }
- if (preserveMode || preserveMtime) {
- if (preserveMode) {
- mode = stat.mode;
- }
- if (preserveMtime) {
- mtime = stat.mtime;
- }
+ for (var key in source) {
+ if (hasOwnProperty.call(source, key)) {
+ target[key] = source[key]
+ }
+ }
}
- yield {
- name: toPosix(p.replace(prefix, '')),
- stream: () => fs__default['default'].createReadStream(p),
- mode,
- mtime,
- size: stat.size
- };
- }
-}
-const toPosix = path => path.replace(/\\/g, '/');
-exports.filesFromPath = filesFromPath;
-exports.getFilesFromPath = getFilesFromPath;
+ return target
+}
/***/ }),
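Module 1208 is `xtend`: a one-shot shallow merge into a fresh object. Unlike the tunnel agent's `mergeOptions` earlier, it copies `undefined` values too; for plain objects it behaves like `Object.assign({}, ...sources)`:

```js
const extend = require('xtend') // assuming the unbundled package

console.log(extend({ a: 1 }, { a: undefined, b: 2 }))
// => { a: undefined, b: 2 } (the undefined value still overwrites)
```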
@@ -26141,7 +23857,7 @@ const { Web3Storage } = __nccwpck_require__(8100)
async function addToWeb3 ({ endpoint, token, pathToAdd, name, wrapWithDirectory = false }) {
const web3 = new Web3Storage({ endpoint, token })
- const files = await getFilesFromPath(`${pathToAdd}`)
+ const files = await getFilesFromPath(pathToAdd)
const cid = await web3.put(files, { name, wrapWithDirectory })
const url = `https://dweb.link/ipfs/${cid}`
return { cid, url }
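This hunk fixes `addToWeb3` to pass `pathToAdd` through as-is rather than coercing it with a template string. A usage sketch with placeholder values (the import path, endpoint, and env var are illustrative, not from the diff):

```js
const { addToWeb3 } = require('./web3') // hypothetical local import

async function run () {
  const { cid, url } = await addToWeb3({
    endpoint: new URL('https://api.web3.storage'), // placeholder endpoint
    token: process.env.WEB3_STORAGE_TOKEN,         // placeholder secret
    pathToAdd: 'dist',
    name: 'my-site',
    wrapWithDirectory: false
  })
  console.log(`stored ${cid} at ${url}`)
}

run()
```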
@@ -28389,9 +26105,11 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
var streamingIterables = __nccwpck_require__(8205);
var pRetry = __nccwpck_require__(2548);
var pack = __nccwpck_require__(8163);
+var parseLink = __nccwpck_require__(1940);
var unpack = __nccwpck_require__(3428);
var treewalk = __nccwpck_require__(6025);
-var filesFromPath = __nccwpck_require__(9641);
+var car = __nccwpck_require__(2805);
+var filesFromPath = __nccwpck_require__(5090);
var fetch = __nccwpck_require__(9681);
var blob = __nccwpck_require__(5343);
var file = __nccwpck_require__(7905);
@@ -28400,6 +26118,7 @@ var fs = __nccwpck_require__(2689);
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var pRetry__default = /*#__PURE__*/_interopDefaultLegacy(pRetry);
+var parseLink__default = /*#__PURE__*/_interopDefaultLegacy(parseLink);
var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
/**
@@ -28424,11 +26143,13 @@ const MAX_CHUNK_SIZE = 1024 * 1024 * 10; // chunk to ~10MB CARs
/** @typedef { import('./lib/interface.js').API } API */
/** @typedef { import('./lib/interface.js').Status} Status */
+/** @typedef { import('./lib/interface.js').Upload} Upload */
/** @typedef { import('./lib/interface.js').Service } Service */
/** @typedef { import('./lib/interface.js').Web3File} Web3File */
/** @typedef { import('./lib/interface.js').Filelike } Filelike */
/** @typedef { import('./lib/interface.js').CIDString} CIDString */
/** @typedef { import('./lib/interface.js').PutOptions} PutOptions */
+/** @typedef { import('./lib/interface.js').PutCarOptions} PutCarOptions */
/** @typedef { import('./lib/interface.js').UnixFSEntry} UnixFSEntry */
/** @typedef { import('./lib/interface.js').Web3Response} Web3Response */
@@ -28465,12 +26186,13 @@ class Web3Storage {
/**
* @hidden
* @param {string} token
+   * @returns {Record<string, string>}
*/
static headers (token) {
if (!token) throw new Error('missing token')
return {
Authorization: `Bearer ${token}`,
- 'X-Client': 'web3.storage'
+ 'X-Client': 'web3.storage/js'
}
}
@@ -28487,21 +26209,7 @@ class Web3Storage {
wrapWithDirectory = true,
name
} = {}) {
- const url = new URL('/car', endpoint);
- const targetSize = MAX_CHUNK_SIZE;
- let headers = Web3Storage.headers(token);
-
- if (name) {
- headers = {
- ...headers,
- // @ts-ignore 'X-Name' does not exist in type inferred
- 'X-Name': name
- };
- }
-
- let carRoot;
const blockstore = new fs.FsBlockStore();
-
try {
const { out, root } = await pack.pack({
input: Array.from(files).map((f) => ({
@@ -28509,54 +26217,86 @@ class Web3Storage {
content: f.stream()
})),
blockstore,
- wrapWithDirectory
+ wrapWithDirectory,
+ maxChunkSize: 1048576,
+ maxChildrenPerNode: 1024
});
- carRoot = root.toString();
+ onRootCidReady && onRootCidReady(root.toString());
+ const car$1 = await car.CarReader.fromIterable(out);
+ return await Web3Storage.putCar({ endpoint, token }, car$1, { onStoredChunk, maxRetries, name })
+ } finally {
+ await blockstore.close();
+ }
+ }
+
+ /**
+ * @param {Service} service
+ * @param {import('@ipld/car/api').CarReader} car
+ * @param {PutCarOptions} [options]
+   * @returns {Promise<CIDString>}
+ */
+ static async putCar ({ endpoint, token }, car, {
+ name,
+ onStoredChunk,
+ maxRetries = MAX_PUT_RETRIES,
+ decoders
+ } = {}) {
+ const targetSize = MAX_CHUNK_SIZE;
+ const url = new URL('car', endpoint);
+ let headers = Web3Storage.headers(token);
- onRootCidReady && onRootCidReady(carRoot);
+ if (name) {
+ headers = { ...headers, 'X-Name': encodeURIComponent(name) };
+ }
+
+ const roots = await car.getRoots();
+ if (roots[0] == null) {
+ throw new Error('missing root CID')
+ }
+ if (roots.length > 1) {
+ throw new Error('too many roots')
+ }
- const splitter = await treewalk.TreewalkCarSplitter.fromIterable(out, targetSize);
+ const carRoot = roots[0].toString();
+ const splitter = new treewalk.TreewalkCarSplitter(car, targetSize, { decoders });
- const upload = streamingIterables.transform(
- MAX_CONCURRENT_UPLOADS,
- async (/** @type {AsyncIterable} */ car) => {
- const carParts = [];
- for await (const part of car) {
- carParts.push(part);
- }
+ /**
+     * @param {AsyncIterable<Uint8Array>} car
+     * @returns {Promise<CIDString>}
+ */
+ const onCarChunk = async car => {
+ const carParts = [];
+ for await (const part of car) {
+ carParts.push(part);
+ }
- const carFile = new blob.Blob(carParts, {
- type: 'application/car'
+ const carFile = new blob.Blob(carParts, { type: 'application/car' });
+ const res = await pRetry__default['default'](
+ async () => {
+ const request = await fetch__default['default'](url.toString(), {
+ method: 'POST',
+ headers,
+ body: carFile
});
+ const res = await request.json();
+ if (!request.ok) {
+ throw new Error(res.message)
+ }
- const res = await pRetry__default['default'](
- async () => {
- const request = await fetch__default['default'](url.toString(), {
- method: 'POST',
- headers,
- body: carFile
- });
- const res = await request.json();
-
- if (request.ok) {
- return res.cid
- } else {
- throw new Error(res.message)
- }
- },
- { retries: maxRetries }
- );
- onStoredChunk && onStoredChunk(carFile.size);
- return res
- }
+ if (res.cid !== carRoot) {
+ throw new Error(`root CID mismatch, expected: ${carRoot}, received: ${res.cid}`)
+ }
+ return res.cid
+ },
+ { retries: maxRetries }
);
- for await (const _ of upload(splitter.cars())) {} // eslint-disable-line
- } finally {
- // Close Blockstore
- await blockstore.close();
- }
+ onStoredChunk && onStoredChunk(carFile.size);
+ return res
+ };
+ const upload = streamingIterables.transform(MAX_CONCURRENT_UPLOADS, onCarChunk);
+ for await (const _ of upload(splitter.cars())) {} // eslint-disable-line
return carRoot
}
@@ -28566,20 +26306,11 @@ class Web3Storage {
   * @returns {Promise<Web3Response | null>}
*/
static async get ({ endpoint, token }, cid) {
- const url = new URL(`/car/${cid}`, endpoint);
+ const url = new URL(`car/${cid}`, endpoint);
const res = await fetch__default['default'](url.toString(), {
method: 'GET',
headers: Web3Storage.headers(token)
});
- if (!res.ok) {
- // TODO: I'm assuming that an error for "CID isn't there (yet)" would be unergonomic. Need to verify.
- // I'm thinking null means, nope, not yet, no can has. Anything else is _AN ERROR_
- if (res.status === 404) {
- return null
- } else {
- throw new Error(`${res.status} ${res.statusText}`)
- }
- }
return toWeb3Response(res)
}
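With the 404-to-`null` mapping removed, `get` now resolves with the raw response regardless of status, and the `res.ok` guards added further down (in `toWeb3Response`) only throw when the body is read. Callers check `ok` themselves; a sketch:

```js
const { Web3Storage } = require('web3.storage')

async function fetchFiles (cid) {
  const client = new Web3Storage({ token: process.env.WEB3_STORAGE_TOKEN }) // placeholder token
  const res = await client.get(cid)
  if (!res.ok) {
    // previously a 404 resolved to null; now the status surfaces here
    throw new Error(`failed to fetch ${cid}: ${res.status} ${res.statusText}`)
  }
  return res.files()
}
```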
@@ -28590,7 +26321,7 @@ class Web3Storage {
*/
/* c8 ignore next 4 */
static async delete ({ endpoint, token }, cid) {
- console.log('Not deleteing', cid, endpoint, token);
+ console.log('Not deleting', cid, endpoint, token);
throw Error('.delete not implemented yet')
}
@@ -28600,7 +26331,7 @@ class Web3Storage {
   * @returns {Promise<Status | undefined>}
*/
static async status ({ endpoint, token }, cid) {
- const url = new URL(`/status/${cid}`, endpoint);
+ const url = new URL(`status/${cid}`, endpoint);
const res = await fetch__default['default'](url.toString(), {
method: 'GET',
headers: Web3Storage.headers(token)
@@ -28614,6 +26345,48 @@ class Web3Storage {
return res.json()
}
+ /**
+ * @param {Service} service
+ * @param {object} [opts]
+ * @param {string} [opts.before] list items uploaded before this ISO 8601 date string
+ * @param {number} [opts.maxResults] maximum number of results to return
+   * @returns {AsyncIterable<Upload>}
+ */
+ static async * list (service, { before = new Date().toISOString(), maxResults = Infinity } = {}) {
+ /**
+ * @param {Service} service
+ * @param {{before: string, size: number}} opts
+     * @returns {Promise<Response>}
+ */
+ function listPage ({ endpoint, token }, { before, size }) {
+ const search = new URLSearchParams({ before, size: size.toString() });
+ const url = new URL(`user/uploads?${search}`, endpoint);
+ return fetch__default['default'](url.toString(), {
+ method: 'GET',
+ headers: {
+ ...Web3Storage.headers(token),
+ 'Access-Control-Request-Headers': 'Link'
+ }
+ })
+ }
+ let count = 0;
+ const size = maxResults > 100 ? 100 : maxResults;
+ for await (const res of paginator(listPage, service, { before, size })) {
+ if (!res.ok) {
+ /* c8 ignore next 2 */
+ const errorMessage = await res.json();
+ throw new Error(`${res.status} ${res.statusText} ${errorMessage ? '- ' + errorMessage.message : ''}`)
+ }
+ const page = await res.json();
+ for (const upload of page) {
+ if (++count > maxResults) {
+ return
+ }
+ yield upload;
+ }
+ }
+ }
+
  // Just sugar so you don't have to pass endpoint and token around.
/**
@@ -28636,7 +26409,52 @@ class Web3Storage {
}
/**
- * Fetch the Content Addressed Archive by it's root CID.
+ * Uploads a CAR ([Content Addressed Archive](https://github.com/ipld/specs/blob/master/block-layer/content-addressable-archives.md)) file to web3.storage.
+ * Takes a CarReader interface from @ipld/car
+ *
+ * Returns the corresponding Content Identifier (CID).
+ *
+ * @example
+ * ```js
+ * import fs from 'fs'
+ * import { Readable } from 'stream'
+ * import { CarReader, CarWriter } from '@ipld/car'
+ * import * as raw from 'multiformats/codecs/raw'
+ * import { CID } from 'multiformats/cid'
+ * import { sha256 } from 'multiformats/hashes/sha2'
+ *
+ * async function getCar() {
+ * const bytes = new TextEncoder().encode('random meaningless bytes')
+ * const hash = await sha256.digest(raw.encode(bytes))
+ * const cid = CID.create(1, raw.code, hash)
+ *
+ * // create the writer and set the header with a single root
+ * const { writer, out } = await CarWriter.create([cid])
+ * Readable.from(out).pipe(fs.createWriteStream('example.car'))
+  *
+ * // store a new block, creates a new file entry in the CAR archive
+ * await writer.put({ cid, bytes })
+ * await writer.close()
+  *
+ * const inStream = fs.createReadStream('example.car')
+  * // read and parse the entire stream in one go; this will cache the contents of
+  * // the CAR in memory, so it is not suitable for large files.
+ * const reader = await CarReader.fromIterable(inStream)
+ * return reader
+ * }
+ *
+ * const car = await getCar()
+ * const cid = await client.putCar(car)
+ * ```
+ * @param {import('@ipld/car/api').CarReader} car
+ * @param {PutCarOptions} [options]
+ */
+ putCar (car, options) {
+ return Web3Storage.putCar(this, car, options)
+ }
+
+ /**
+ * Fetch the Content Addressed Archive by its root CID.
* @param {CIDString} cid
*/
get (cid) {
@@ -28658,6 +26476,26 @@ class Web3Storage {
status (cid) {
return Web3Storage.status(this, cid)
}
+
+ /**
+ * Find all uploads for this account. Use a `for await...of` loop to fetch them all.
+ * @example
+ * Fetch all the uploads
+ * ```js
+ * const uploads = []
+ * for await (const item of client.list()) {
+ * uploads.push(item)
+ * }
+ * ```
+ * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
+ * @param {object} [opts]
+ * @param {string} [opts.before] list items uploaded before this ISO 8601 date string
+ * @param {number} [opts.maxResults] maximum number of results to return
+   * @returns {AsyncIterable<Upload>}
+ */
+ list (opts) {
+ return Web3Storage.list(this, opts)
+ }
}
/**
@@ -28695,6 +26533,9 @@ function toFilenameWithPath (unixFsPath) {
function toWeb3Response (res) {
const response = Object.assign(res, {
unixFsIterator: async function * () {
+ if (!res.ok) {
+ throw new Error(`Response was not ok: ${res.status} ${res.statusText} - Check for { "ok": false } on the Response object before calling .unixFsIterator`)
+ }
/* c8 ignore next 3 */
if (!res.body) {
throw new Error('No body on response')
@@ -28709,6 +26550,9 @@ function toWeb3Response (res) {
}
},
files: async () => {
+ if (!res.ok) {
+ throw new Error(`Response was not ok: ${res.status} ${res.statusText} - Check for { "ok": false } on the Response object before calling .files`)
+ }
const files = [];
// @ts-ignore we're using the enriched response here
for await (const entry of response.unixFsIterator()) {
@@ -28724,6 +26568,26 @@ function toWeb3Response (res) {
return response
}
+/**
+ * Follow Link headers on a Response to fetch all the things.
+ *
+ * @param {(service: Service, opts: any) => Promise<Response>} fn
+ * @param {Service} service
+ * @param {{}} opts
+ */
+async function * paginator (fn, service, opts) {
+ let res = await fn(service, opts);
+ yield res;
+ let link = parseLink__default['default'](res.headers.get('Link') || '');
+ // @ts-ignore
+ while (link && link.next) {
+ // @ts-ignore
+ res = await fn(service, link.next);
+ yield res;
+ link = parseLink__default['default'](res.headers.get('Link') || '');
+ }
+}
+
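`list` pages through `user/uploads` by following `Link` headers via the `paginator` helper above, yielding uploads until `maxResults` is reached. Iterating it from the client side:

```js
const { Web3Storage } = require('web3.storage')

async function main () {
  const client = new Web3Storage({ token: process.env.WEB3_STORAGE_TOKEN }) // placeholder token
  for await (const upload of client.list({ maxResults: 250 })) {
    console.log(upload.cid, upload.name)
  }
}

main()
```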
Object.defineProperty(exports, "filesFromPath", ({
enumerable: true,
get: function () {
@@ -28752,6 +26616,14 @@ exports.Web3Storage = Web3Storage;
//# sourceMappingURL=lib.cjs.map
+/***/ }),
+
+/***/ 2357:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("assert");
+
/***/ }),
/***/ 4293:
@@ -28770,6 +26642,14 @@ module.exports = require("crypto");
/***/ }),
+/***/ 8614:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("events");
+
+/***/ }),
+
/***/ 5747:
/***/ ((module) => {
@@ -28794,6 +26674,14 @@ module.exports = require("https");
/***/ }),
+/***/ 1631:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("net");
+
+/***/ }),
+
/***/ 2087:
/***/ ((module) => {
@@ -28810,6 +26698,14 @@ module.exports = require("path");
/***/ }),
+/***/ 1191:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("querystring");
+
+/***/ }),
+
/***/ 2413:
/***/ ((module) => {
@@ -28818,6 +26714,14 @@ module.exports = require("stream");
/***/ }),
+/***/ 4016:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("tls");
+
+/***/ }),
+
/***/ 8835:
/***/ ((module) => {
diff --git a/dist/index.js.map b/dist/index.js.map
index 3cdd8cd..e7431bd 100644
--- a/dist/index.js.map
+++ b/dist/index.js.map
@@ -1 +1 @@
-{"version":3,"file":"index.js","sources":["../webpack://add-to-web3/./node_modules/@actions/core/lib/command.js","../webpack://add-to-web3/./node_modules/@actions/core/lib/core.js","../webpack://add-to-web3/./node_modules/@actions/core/lib/file-command.js","../webpack://add-to-web3/./node_modules/@actions/core/lib/utils.js","../webpack://add-to-web3/./node_modules/@assemblyscript/loader/index.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/car.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/decoder.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/encoder.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/indexed-reader.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/indexer.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/iterator-channel.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/iterator.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/reader-browser.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/reader.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/writer-browser.js","../webpack://add-to-web3/./node_modules/@ipld/car/cjs/lib/writer.js","../webpack://add-to-web3/./node_modules/@ipld/dag-cbor/cjs/index.js","../webpack://add-to-web3/./node_modules/@ipld/dag-pb/cjs/src/index.js","../webpack://add-to-web3/./node_modules/@ipld/dag-pb/cjs/src/pb-decode.js","../webpack://add-to-web3/./node_modules/@ipld/dag-pb/cjs/src/pb-encode.js","../webpack://add-to-web3/./node_modules/@ipld/dag-pb/cjs/src/util.js","../webpack://add-to-web3/./node_modules/@protobufjs/aspromise/index.js","../webpack://add-to-web3/./node_modules/@protobufjs/base64/index.js","../webpack://add-to-web3/./node_modules/@protobufjs/eventemitter/index.js","../webpack://add-to-web3/./node_modules/@protobufjs/float/index.js","../webpack://add-to-web3/./node_modules/@protobufjs/inquire/index.js","../webpack://add-to-web3/./node_modules/@protobufjs/pool/index.js","../webpack://add-to-web3/./node_modules/@protobufjs/utf8/index.js","../webpack://add-to-web3/./node_modules/balanced-match/index.js","../webpack://add-to-web3/./node_modules/blob-to-it/index.js","../webpack://add-to-web3/./node_modules/brace-expansion/index.js","../webpack://add-to-web3/./node_modules/browser-readablestream-to-it/index.js","../webpack://add-to-web3/./node_modules/carbites/cjs/lib/treewalk/index.js","../webpack://add-to-web3/./node_modules/carbites/cjs/lib/treewalk/joiner.js","../webpack://add-to-web3/./node_modules/carbites/cjs/lib/treewalk/splitter.js","../webpack://add-to-web3/./node_modules/cborg/cjs/cborg.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/0uint.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/1negint.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/2bytes.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/3string.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/4array.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/5map.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/6tag.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/7float.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/bl.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/byte-utils.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/common.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/decode.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/encode.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/is.js","../
webpack://add-to-web3/./node_modules/cborg/cjs/lib/jump.js","../webpack://add-to-web3/./node_modules/cborg/cjs/lib/token.js","../webpack://add-to-web3/./node_modules/concat-map/index.js","../webpack://add-to-web3/./node_modules/data-uri-to-buffer/dist/src/index.js","../webpack://add-to-web3/./node_modules/err-code/index.js","../webpack://add-to-web3/./node_modules/files-from-path/cjs/src/index.js","../webpack://add-to-web3/./node_modules/hamt-sharding/src/bucket.js","../webpack://add-to-web3/./node_modules/hamt-sharding/src/consumable-buffer.js","../webpack://add-to-web3/./node_modules/hamt-sharding/src/consumable-hash.js","../webpack://add-to-web3/./node_modules/hamt-sharding/src/index.js","../webpack://add-to-web3/./node_modules/interface-blockstore/src/adapter.js","../webpack://add-to-web3/./node_modules/interface-blockstore/src/errors.js","../webpack://add-to-web3/./node_modules/interface-blockstore/src/index.js","../webpack://add-to-web3/./node_modules/interface-blockstore/src/memory.js","../webpack://add-to-web3/./node_modules/ipfs-car/dist/cjs/blockstore/fs.js","../webpack://add-to-web3/./node_modules/ipfs-car/dist/cjs/blockstore/memory.js","../webpack://add-to-web3/./node_modules/ipfs-car/dist/cjs/pack/constants.js","../webpack://add-to-web3/./node_modules/ipfs-car/dist/cjs/pack/index.js","../webpack://add-to-web3/./node_modules/ipfs-car/dist/cjs/unpack/index.js","../webpack://add-to-web3/./node_modules/ipfs-car/dist/cjs/unpack/utils/verifying-get-only-blockstore.js","../webpack://add-to-web3/./node_modules/ipfs-core-utils/src/files/normalise-input/index.js","../webpack://add-to-web3/./node_modules/ipfs-core-utils/src/files/normalise-input/normalise-content.js","../webpack://add-to-web3/./node_modules/ipfs-core-utils/src/files/normalise-input/normalise-input.js","../webpack://add-to-web3/./node_modules/ipfs-core-utils/src/files/normalise-input/utils.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/node_modules/ipfs-unixfs/src/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/node_modules/ipfs-unixfs/src/unixfs.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/identity.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/raw.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/utils/extract-data-from-block.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/node_modules/bl/BufferList.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/node_modules/ipfs-unixfs/src/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/node_modules/ipfs-unixfs/src/unixfs.js","../webpack:/
/add-to-web3/./node_modules/ipfs-unixfs-importer/src/chunker/fixed-size.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/chunker/rabin.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/dir.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/file/balanced.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/file/flat.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/file/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/file/trickle.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dag-builder/validate-chunks.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dir-flat.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dir-sharded.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/dir.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/flat-to-shard.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/options.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/tree-builder.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/utils/persist.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs-importer/src/utils/to-path-components.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs/src/index.js","../webpack://add-to-web3/./node_modules/ipfs-unixfs/src/unixfs.js","../webpack://add-to-web3/./node_modules/is-plain-obj/index.js","../webpack://add-to-web3/./node_modules/it-all/index.js","../webpack://add-to-web3/./node_modules/it-batch/index.js","../webpack://add-to-web3/./node_modules/it-drain/index.js","../webpack://add-to-web3/./node_modules/it-filter/index.js","../webpack://add-to-web3/./node_modules/it-glob/index.js","../webpack://add-to-web3/./node_modules/it-last/index.js","../webpack://add-to-web3/./node_modules/it-map/index.js","../webpack://add-to-web3/./node_modules/it-parallel-batch/index.js","../webpack://add-to-web3/./node_modules/it-peekable/index.js","../webpack://add-to-web3/./node_modules/it-pipe/index.js","../webpack://add-to-web3/./node_modules/it-take/index.js","../webpack://add-to-web3/./node_modules/merge-options/index.js","../webpack://add-to-web3/./node_modules/minimatch/minimatch.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base10.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base16.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base2.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base32.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base36.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base58.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base64.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/base8.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bases/identity.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/basics.js","../webpack://add-to-web3/./node_modules/multiformats/
cjs/src/block.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/bytes.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/cid.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/codecs/json.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/codecs/raw.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/hashes/digest.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/hashes/hasher.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/hashes/identity.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/hashes/sha2.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/index.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/src/varint.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/vendor/base-x.js","../webpack://add-to-web3/./node_modules/multiformats/cjs/vendor/varint.js","../webpack://add-to-web3/./node_modules/murmurhash3js-revisited/index.js","../webpack://add-to-web3/./node_modules/murmurhash3js-revisited/lib/murmurHash3js.js","../webpack://add-to-web3/./node_modules/p-retry/index.js","../webpack://add-to-web3/./node_modules/protobufjs/minimal.js","../webpack://add-to-web3/./node_modules/protobufjs/src/index-minimal.js","../webpack://add-to-web3/./node_modules/protobufjs/src/reader.js","../webpack://add-to-web3/./node_modules/protobufjs/src/reader_buffer.js","../webpack://add-to-web3/./node_modules/protobufjs/src/roots.js","../webpack://add-to-web3/./node_modules/protobufjs/src/rpc.js","../webpack://add-to-web3/./node_modules/protobufjs/src/rpc/service.js","../webpack://add-to-web3/./node_modules/protobufjs/src/util/longbits.js","../webpack://add-to-web3/./node_modules/protobufjs/src/util/minimal.js","../webpack://add-to-web3/./node_modules/protobufjs/src/writer.js","../webpack://add-to-web3/./node_modules/protobufjs/src/writer_buffer.js","../webpack://add-to-web3/./node_modules/rabin-wasm/dist/rabin-wasm.node.js","../webpack://add-to-web3/./node_modules/rabin-wasm/src/index.js","../webpack://add-to-web3/./node_modules/rabin-wasm/src/rabin.js","../webpack://add-to-web3/./node_modules/retry/index.js","../webpack://add-to-web3/./node_modules/retry/lib/retry.js","../webpack://add-to-web3/./node_modules/retry/lib/retry_operation.js","../webpack://add-to-web3/./node_modules/sparse-array/index.js","../webpack://add-to-web3/./node_modules/streaming-iterables/dist/index.js","../webpack://add-to-web3/./node_modules/uint8arrays/compare.js","../webpack://add-to-web3/./node_modules/uint8arrays/concat.js","../webpack://add-to-web3/./node_modules/uint8arrays/equals.js","../webpack://add-to-web3/./node_modules/uint8arrays/from-string.js","../webpack://add-to-web3/./node_modules/uint8arrays/index.js","../webpack://add-to-web3/./node_modules/uint8arrays/to-string.js","../webpack://add-to-web3/./node_modules/uint8arrays/util/bases.js","../webpack://add-to-web3/./node_modules/uint8arrays/xor.js","../webpack://add-to-web3/./node_modules/varint/decode.js","../webpack://add-to-web3/./node_modules/varint/encode.js","../webpack://add-to-web3/./node_modules/varint/index.js","../webpack://add-to-web3/./node_modules/varint/length.js","../webpack://add-to-web3/./node_modules/web-encoding/src/lib.js","../webpack://add-to-web3/./node_modules/web-streams-polyfill/dist/polyfill.js","../webpack://add-to-web3/./node_modules/web3.storage/node_modules/files-from-path/cjs/src/index.js","../webpack://add-to-web3/./web3.js","../webpack://add-to-web3/./node_modules/@web-std/blob/
dist/src/lib.node.cjs","../webpack://add-to-web3/./node_modules/@web-std/blob/dist/src/package.cjs","../webpack://add-to-web3/./node_modules/@web-std/fetch/dist/index.cjs","../webpack://add-to-web3/./node_modules/@web-std/file/dist/src/lib.node.cjs","../webpack://add-to-web3/./node_modules/web3.storage/dist/src/lib.cjs","../webpack://add-to-web3/external \"buffer\"","../webpack://add-to-web3/external \"crypto\"","../webpack://add-to-web3/external \"fs\"","../webpack://add-to-web3/external \"http\"","../webpack://add-to-web3/external \"https\"","../webpack://add-to-web3/external \"os\"","../webpack://add-to-web3/external \"path\"","../webpack://add-to-web3/external \"stream\"","../webpack://add-to-web3/external \"url\"","../webpack://add-to-web3/external \"util\"","../webpack://add-to-web3/external \"zlib\"","../webpack://add-to-web3/webpack/bootstrap","../webpack://add-to-web3/webpack/runtime/compat","../webpack://add-to-web3/./index.js"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n 
.replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\n\n// Runtime header offsets\nconst ID_OFFSET = -8;\nconst SIZE_OFFSET = -4;\n\n// Runtime ids\nconst ARRAYBUFFER_ID = 0;\nconst STRING_ID = 1;\nconst ARRAYBUFFERVIEW_ID = 2;\n\n// Runtime type information\nconst ARRAYBUFFERVIEW = 1 << 0;\nconst ARRAY = 1 << 1;\nconst SET = 1 << 2;\nconst MAP = 1 << 3;\nconst VAL_ALIGN_OFFSET = 5;\nconst VAL_ALIGN = 1 << VAL_ALIGN_OFFSET;\nconst VAL_SIGNED = 1 << 10;\nconst VAL_FLOAT = 1 << 11;\nconst VAL_NULLABLE = 1 << 12;\nconst VAL_MANAGED = 1 << 13;\nconst KEY_ALIGN_OFFSET = 14;\nconst KEY_ALIGN = 1 << KEY_ALIGN_OFFSET;\nconst KEY_SIGNED = 1 << 19;\nconst KEY_FLOAT = 1 << 20;\nconst 
KEY_NULLABLE = 1 << 21;\nconst KEY_MANAGED = 1 << 22;\n\n// Array(BufferView) layout\nconst ARRAYBUFFERVIEW_BUFFER_OFFSET = 0;\nconst ARRAYBUFFERVIEW_DATASTART_OFFSET = 4;\nconst ARRAYBUFFERVIEW_DATALENGTH_OFFSET = 8;\nconst ARRAYBUFFERVIEW_SIZE = 12;\nconst ARRAY_LENGTH_OFFSET = 12;\nconst ARRAY_SIZE = 16;\n\nconst BIGINT = typeof BigUint64Array !== \"undefined\";\nconst THIS = Symbol();\nconst CHUNKSIZE = 1024;\n\n/** Gets a string from an U32 and an U16 view on a memory. */\nfunction getStringImpl(buffer, ptr) {\n const U32 = new Uint32Array(buffer);\n const U16 = new Uint16Array(buffer);\n var length = U32[(ptr + SIZE_OFFSET) >>> 2] >>> 1;\n var offset = ptr >>> 1;\n if (length <= CHUNKSIZE) return String.fromCharCode.apply(String, U16.subarray(offset, offset + length));\n const parts = [];\n do {\n const last = U16[offset + CHUNKSIZE - 1];\n const size = last >= 0xD800 && last < 0xDC00 ? CHUNKSIZE - 1 : CHUNKSIZE;\n parts.push(String.fromCharCode.apply(String, U16.subarray(offset, offset += size)));\n length -= size;\n } while (length > CHUNKSIZE);\n return parts.join(\"\") + String.fromCharCode.apply(String, U16.subarray(offset, offset + length));\n}\n\n/** Prepares the base module prior to instantiation. */\nfunction preInstantiate(imports) {\n const baseModule = {};\n\n function getString(memory, ptr) {\n if (!memory) return \"\";\n return getStringImpl(memory.buffer, ptr);\n }\n\n // add common imports used by stdlib for convenience\n const env = (imports.env = imports.env || {});\n env.abort = env.abort || function abort(mesg, file, line, colm) {\n const memory = baseModule.memory || env.memory; // prefer exported, otherwise try imported\n throw Error(\"abort: \" + getString(memory, mesg) + \" at \" + getString(memory, file) + \":\" + line + \":\" + colm);\n }\n env.trace = env.trace || function trace(mesg, n) {\n const memory = baseModule.memory || env.memory;\n console.log(\"trace: \" + getString(memory, mesg) + (n ? \" \" : \"\") + Array.prototype.slice.call(arguments, 2, 2 + n).join(\", \"));\n }\n imports.Math = imports.Math || Math;\n imports.Date = imports.Date || Date;\n\n return baseModule;\n}\n\n/** Prepares the final module once instantiation is complete. */\nfunction postInstantiate(baseModule, instance) {\n const rawExports = instance.exports;\n const memory = rawExports.memory;\n const table = rawExports.table;\n const alloc = rawExports[\"__alloc\"];\n const retain = rawExports[\"__retain\"];\n const rttiBase = rawExports[\"__rtti_base\"] || ~0; // oob if not present\n\n /** Gets the runtime type info for the given id. */\n function getInfo(id) {\n const U32 = new Uint32Array(memory.buffer);\n const count = U32[rttiBase >>> 2];\n if ((id >>>= 0) >= count) throw Error(\"invalid id: \" + id);\n return U32[(rttiBase + 4 >>> 2) + id * 2];\n }\n\n /** Gets the runtime base id for the given id. */\n function getBase(id) {\n const U32 = new Uint32Array(memory.buffer);\n const count = U32[rttiBase >>> 2];\n if ((id >>>= 0) >= count) throw Error(\"invalid id: \" + id);\n return U32[(rttiBase + 4 >>> 2) + id * 2 + 1];\n }\n\n /** Gets the runtime alignment of a collection's values. */\n function getValueAlign(info) {\n return 31 - Math.clz32((info >>> VAL_ALIGN_OFFSET) & 31); // -1 if none\n }\n\n /** Gets the runtime alignment of a collection's keys. */\n function getKeyAlign(info) {\n return 31 - Math.clz32((info >>> KEY_ALIGN_OFFSET) & 31); // -1 if none\n }\n\n /** Allocates a new string in the module's memory and returns its retained pointer. 
*/\n function __allocString(str) {\n const length = str.length;\n const ptr = alloc(length << 1, STRING_ID);\n const U16 = new Uint16Array(memory.buffer);\n for (var i = 0, p = ptr >>> 1; i < length; ++i) U16[p + i] = str.charCodeAt(i);\n return ptr;\n }\n\n baseModule.__allocString = __allocString;\n\n /** Reads a string from the module's memory by its pointer. */\n function __getString(ptr) {\n const buffer = memory.buffer;\n const id = new Uint32Array(buffer)[ptr + ID_OFFSET >>> 2];\n if (id !== STRING_ID) throw Error(\"not a string: \" + ptr);\n return getStringImpl(buffer, ptr);\n }\n\n baseModule.__getString = __getString;\n\n /** Gets the view matching the specified alignment, signedness and floatness. */\n function getView(alignLog2, signed, float) {\n const buffer = memory.buffer;\n if (float) {\n switch (alignLog2) {\n case 2: return new Float32Array(buffer);\n case 3: return new Float64Array(buffer);\n }\n } else {\n switch (alignLog2) {\n case 0: return new (signed ? Int8Array : Uint8Array)(buffer);\n case 1: return new (signed ? Int16Array : Uint16Array)(buffer);\n case 2: return new (signed ? Int32Array : Uint32Array)(buffer);\n case 3: return new (signed ? BigInt64Array : BigUint64Array)(buffer);\n }\n }\n throw Error(\"unsupported align: \" + alignLog2);\n }\n\n /** Allocates a new array in the module's memory and returns its retained pointer. */\n function __allocArray(id, values) {\n const info = getInfo(id);\n if (!(info & (ARRAYBUFFERVIEW | ARRAY))) throw Error(\"not an array: \" + id + \" @ \" + info);\n const align = getValueAlign(info);\n const length = values.length;\n const buf = alloc(length << align, ARRAYBUFFER_ID);\n const arr = alloc(info & ARRAY ? ARRAY_SIZE : ARRAYBUFFERVIEW_SIZE, id);\n const U32 = new Uint32Array(memory.buffer);\n U32[arr + ARRAYBUFFERVIEW_BUFFER_OFFSET >>> 2] = retain(buf);\n U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2] = buf;\n U32[arr + ARRAYBUFFERVIEW_DATALENGTH_OFFSET >>> 2] = length << align;\n if (info & ARRAY) U32[arr + ARRAY_LENGTH_OFFSET >>> 2] = length;\n const view = getView(align, info & VAL_SIGNED, info & VAL_FLOAT);\n if (info & VAL_MANAGED) {\n for (let i = 0; i < length; ++i) view[(buf >>> align) + i] = retain(values[i]);\n } else {\n view.set(values, buf >>> align);\n }\n return arr;\n }\n\n baseModule.__allocArray = __allocArray;\n\n /** Gets a live view on an array's values in the module's memory. Infers the array type from RTTI. */\n function __getArrayView(arr) {\n const U32 = new Uint32Array(memory.buffer);\n const id = U32[arr + ID_OFFSET >>> 2];\n const info = getInfo(id);\n if (!(info & ARRAYBUFFERVIEW)) throw Error(\"not an array: \" + id);\n const align = getValueAlign(info);\n var buf = U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];\n const length = info & ARRAY\n ? U32[arr + ARRAY_LENGTH_OFFSET >>> 2]\n : U32[buf + SIZE_OFFSET >>> 2] >>> align;\n return getView(align, info & VAL_SIGNED, info & VAL_FLOAT)\n .subarray(buf >>>= align, buf + length);\n }\n\n baseModule.__getArrayView = __getArrayView;\n\n /** Copies an array's values from the module's memory. Infers the array type from RTTI. */\n function __getArray(arr) {\n const input = __getArrayView(arr);\n const len = input.length;\n const out = new Array(len);\n for (let i = 0; i < len; i++) out[i] = input[i];\n return out;\n }\n\n baseModule.__getArray = __getArray;\n\n /** Copies an ArrayBuffer's value from the module's memory. 
*/\n function __getArrayBuffer(ptr) {\n const buffer = memory.buffer;\n const length = new Uint32Array(buffer)[ptr + SIZE_OFFSET >>> 2];\n return buffer.slice(ptr, ptr + length);\n }\n\n baseModule.__getArrayBuffer = __getArrayBuffer;\n\n /** Copies a typed array's values from the module's memory. */\n function getTypedArray(Type, alignLog2, ptr) {\n return new Type(getTypedArrayView(Type, alignLog2, ptr));\n }\n\n /** Gets a live view on a typed array's values in the module's memory. */\n function getTypedArrayView(Type, alignLog2, ptr) {\n const buffer = memory.buffer;\n const U32 = new Uint32Array(buffer);\n const bufPtr = U32[ptr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];\n return new Type(buffer, bufPtr, U32[bufPtr + SIZE_OFFSET >>> 2] >>> alignLog2);\n }\n\n baseModule.__getInt8Array = getTypedArray.bind(null, Int8Array, 0);\n baseModule.__getInt8ArrayView = getTypedArrayView.bind(null, Int8Array, 0);\n baseModule.__getUint8Array = getTypedArray.bind(null, Uint8Array, 0);\n baseModule.__getUint8ArrayView = getTypedArrayView.bind(null, Uint8Array, 0);\n baseModule.__getUint8ClampedArray = getTypedArray.bind(null, Uint8ClampedArray, 0);\n baseModule.__getUint8ClampedArrayView = getTypedArrayView.bind(null, Uint8ClampedArray, 0);\n baseModule.__getInt16Array = getTypedArray.bind(null, Int16Array, 1);\n baseModule.__getInt16ArrayView = getTypedArrayView.bind(null, Int16Array, 1);\n baseModule.__getUint16Array = getTypedArray.bind(null, Uint16Array, 1);\n baseModule.__getUint16ArrayView = getTypedArrayView.bind(null, Uint16Array, 1);\n baseModule.__getInt32Array = getTypedArray.bind(null, Int32Array, 2);\n baseModule.__getInt32ArrayView = getTypedArrayView.bind(null, Int32Array, 2);\n baseModule.__getUint32Array = getTypedArray.bind(null, Uint32Array, 2);\n baseModule.__getUint32ArrayView = getTypedArrayView.bind(null, Uint32Array, 2);\n if (BIGINT) {\n baseModule.__getInt64Array = getTypedArray.bind(null, BigInt64Array, 3);\n baseModule.__getInt64ArrayView = getTypedArrayView.bind(null, BigInt64Array, 3);\n baseModule.__getUint64Array = getTypedArray.bind(null, BigUint64Array, 3);\n baseModule.__getUint64ArrayView = getTypedArrayView.bind(null, BigUint64Array, 3);\n }\n baseModule.__getFloat32Array = getTypedArray.bind(null, Float32Array, 2);\n baseModule.__getFloat32ArrayView = getTypedArrayView.bind(null, Float32Array, 2);\n baseModule.__getFloat64Array = getTypedArray.bind(null, Float64Array, 3);\n baseModule.__getFloat64ArrayView = getTypedArrayView.bind(null, Float64Array, 3);\n\n /** Tests whether an object is an instance of the class represented by the specified base id. */\n function __instanceof(ptr, baseId) {\n const U32 = new Uint32Array(memory.buffer);\n var id = U32[(ptr + ID_OFFSET) >>> 2];\n if (id <= U32[rttiBase >>> 2]) {\n do if (id == baseId) return true;\n while (id = getBase(id));\n }\n return false;\n }\n\n baseModule.__instanceof = __instanceof;\n\n // Pull basic exports to baseModule so code in preInstantiate can use them\n baseModule.memory = baseModule.memory || memory;\n baseModule.table = baseModule.table || table;\n\n // Demangle exports and provide the usual utility on the prototype\n return demangle(rawExports, baseModule);\n}\n\nfunction isResponse(o) {\n return typeof Response !== \"undefined\" && o instanceof Response;\n}\n\n/** Asynchronously instantiates an AssemblyScript module from anything that can be instantiated. 
*/\nasync function instantiate(source, imports) {\n if (isResponse(source = await source)) return instantiateStreaming(source, imports);\n return postInstantiate(\n preInstantiate(imports || (imports = {})),\n await WebAssembly.instantiate(\n source instanceof WebAssembly.Module\n ? source\n : await WebAssembly.compile(source),\n imports\n )\n );\n}\n\nexports.instantiate = instantiate;\n\n/** Synchronously instantiates an AssemblyScript module from a WebAssembly.Module or binary buffer. */\nfunction instantiateSync(source, imports) {\n return postInstantiate(\n preInstantiate(imports || (imports = {})),\n new WebAssembly.Instance(\n source instanceof WebAssembly.Module\n ? source\n : new WebAssembly.Module(source),\n imports\n )\n )\n}\n\nexports.instantiateSync = instantiateSync;\n\n/** Asynchronously instantiates an AssemblyScript module from a response, i.e. as obtained by `fetch`. */\nasync function instantiateStreaming(source, imports) {\n if (!WebAssembly.instantiateStreaming) {\n return instantiate(\n isResponse(source = await source)\n ? source.arrayBuffer()\n : source,\n imports\n );\n }\n return postInstantiate(\n preInstantiate(imports || (imports = {})),\n (await WebAssembly.instantiateStreaming(source, imports)).instance\n );\n}\n\nexports.instantiateStreaming = instantiateStreaming;\n\n/** Demangles an AssemblyScript module's exports to a friendly object structure. */\nfunction demangle(exports, baseModule) {\n var module = baseModule ? Object.create(baseModule) : {};\n var setArgumentsLength = exports[\"__argumentsLength\"]\n ? function(length) { exports[\"__argumentsLength\"].value = length; }\n : exports[\"__setArgumentsLength\"] || exports[\"__setargc\"] || function() {};\n for (let internalName in exports) {\n if (!Object.prototype.hasOwnProperty.call(exports, internalName)) continue;\n const elem = exports[internalName];\n let parts = internalName.split(\".\");\n let curr = module;\n while (parts.length > 1) {\n let part = parts.shift();\n if (!Object.prototype.hasOwnProperty.call(curr, part)) curr[part] = {};\n curr = curr[part];\n }\n let name = parts[0];\n let hash = name.indexOf(\"#\");\n if (hash >= 0) {\n let className = name.substring(0, hash);\n let classElem = curr[className];\n if (typeof classElem === \"undefined\" || !classElem.prototype) {\n let ctor = function(...args) {\n return ctor.wrap(ctor.prototype.constructor(0, ...args));\n };\n ctor.prototype = {\n valueOf: function valueOf() {\n return this[THIS];\n }\n };\n ctor.wrap = function(thisValue) {\n return Object.create(ctor.prototype, { [THIS]: { value: thisValue, writable: false } });\n };\n if (classElem) Object.getOwnPropertyNames(classElem).forEach(name =>\n Object.defineProperty(ctor, name, Object.getOwnPropertyDescriptor(classElem, name))\n );\n curr[className] = ctor;\n }\n name = name.substring(hash + 1);\n curr = curr[className].prototype;\n if (/^(get|set):/.test(name)) {\n if (!Object.prototype.hasOwnProperty.call(curr, name = name.substring(4))) {\n let getter = exports[internalName.replace(\"set:\", \"get:\")];\n let setter = exports[internalName.replace(\"get:\", \"set:\")];\n Object.defineProperty(curr, name, {\n get: function() { return getter(this[THIS]); },\n set: function(value) { setter(this[THIS], value); },\n enumerable: true\n });\n }\n } else {\n if (name === 'constructor') {\n (curr[name] = (...args) => {\n setArgumentsLength(args.length);\n return elem(...args);\n }).original = elem;\n } else { // instance method\n (curr[name] = function(...args) { // !\n 
setArgumentsLength(args.length);\n return elem(this[THIS], ...args);\n }).original = elem;\n }\n }\n } else {\n if (/^(get|set):/.test(name)) {\n if (!Object.prototype.hasOwnProperty.call(curr, name = name.substring(4))) {\n Object.defineProperty(curr, name, {\n get: exports[internalName.replace(\"set:\", \"get:\")],\n set: exports[internalName.replace(\"get:\", \"set:\")],\n enumerable: true\n });\n }\n } else if (typeof elem === \"function\" && elem !== setArgumentsLength) {\n (curr[name] = (...args) => {\n setArgumentsLength(args.length);\n return elem(...args);\n }).original = elem;\n } else {\n curr[name] = elem;\n }\n }\n }\n return module;\n}\n\nexports.demangle = demangle;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar reader = require('./lib/reader.js');\nvar indexer = require('./lib/indexer.js');\nvar iterator = require('./lib/iterator.js');\nvar writer = require('./lib/writer.js');\nvar indexedReader = require('./lib/indexed-reader.js');\n\n\n\nexports.CarReader = reader.CarReader;\nexports.CarIndexer = indexer.CarIndexer;\nexports.CarBlockIterator = iterator.CarBlockIterator;\nexports.CarCIDIterator = iterator.CarCIDIterator;\nexports.CarWriter = writer.CarWriter;\nexports.CarIndexedReader = indexedReader.CarIndexedReader;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar varint = require('varint');\nvar cid = require('multiformats/cid');\nvar Digest = require('multiformats/hashes/digest');\nvar dagCbor = require('@ipld/dag-cbor');\n\nfunction _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }\n\nfunction _interopNamespace(e) {\n if (e && e.__esModule) return e;\n var n = Object.create(null);\n if (e) {\n Object.keys(e).forEach(function (k) {\n if (k !== 'default') {\n var d = Object.getOwnPropertyDescriptor(e, k);\n Object.defineProperty(n, k, d.get ? 
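// Usage sketch (not part of the bundle): the @ipld/car entry point above
// re-exports the reader, writer, indexer and iterator classes defined in the
// modules that follow. A minimal sketch of streaming the CIDs out of a CAR
// file, assuming the published @ipld/car package and a hypothetical `path`:
async function exampleListCids(path) {
  const { CarCIDIterator } = require("@ipld/car");
  const fs = require("fs");
  const cids = await CarCIDIterator.fromIterable(fs.createReadStream(path));
  for await (const cid of cids) {
    console.log(cid.toString());
  }
}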
d : {\n enumerable: true,\n get: function () {\n return e[k];\n }\n });\n }\n });\n }\n n['default'] = e;\n return Object.freeze(n);\n}\n\nvar varint__default = /*#__PURE__*/_interopDefaultLegacy(varint);\nvar Digest__namespace = /*#__PURE__*/_interopNamespace(Digest);\n\nconst CIDV0_BYTES = {\n SHA2_256: 18,\n LENGTH: 32,\n DAG_PB: 112\n};\nasync function readVarint(reader) {\n const bytes = await reader.upTo(8);\n const i = varint__default['default'].decode(bytes);\n reader.seek(varint__default['default'].decode.bytes);\n return i;\n}\nasync function readHeader(reader) {\n const length = await readVarint(reader);\n const header = await reader.exactly(length);\n reader.seek(length);\n const block = dagCbor.decode(header);\n if (block == null || Array.isArray(block) || typeof block !== 'object') {\n throw new Error('Invalid CAR header format');\n }\n if (block.version !== 1) {\n if (typeof block.version === 'string') {\n throw new Error(`Invalid CAR version: \"${ block.version }\"`);\n }\n throw new Error(`Invalid CAR version: ${ block.version }`);\n }\n if (!Array.isArray(block.roots)) {\n throw new Error('Invalid CAR header format');\n }\n if (Object.keys(block).filter(p => p !== 'roots' && p !== 'version').length) {\n throw new Error('Invalid CAR header format');\n }\n return block;\n}\nasync function readMultihash(reader) {\n const bytes = await reader.upTo(8);\n varint__default['default'].decode(bytes);\n const codeLength = varint__default['default'].decode.bytes;\n const length = varint__default['default'].decode(bytes.subarray(varint__default['default'].decode.bytes));\n const lengthLength = varint__default['default'].decode.bytes;\n const mhLength = codeLength + lengthLength + length;\n const multihash = await reader.exactly(mhLength);\n reader.seek(mhLength);\n return multihash;\n}\nasync function readCid(reader) {\n const first = await reader.exactly(2);\n if (first[0] === CIDV0_BYTES.SHA2_256 && first[1] === CIDV0_BYTES.LENGTH) {\n const bytes = await reader.exactly(34);\n reader.seek(34);\n const multihash = Digest__namespace.decode(bytes);\n return cid.CID.create(0, CIDV0_BYTES.DAG_PB, multihash);\n }\n const version = await readVarint(reader);\n if (version !== 1) {\n throw new Error(`Unexpected CID version (${ version })`);\n }\n const codec = await readVarint(reader);\n const bytes = await readMultihash(reader);\n const multihash = Digest__namespace.decode(bytes);\n return cid.CID.create(version, codec, multihash);\n}\nasync function readBlockHead(reader) {\n const start = reader.pos;\n const length = await readVarint(reader) + (reader.pos - start);\n const cid = await readCid(reader);\n const blockLength = length - (reader.pos - start);\n return {\n cid,\n length,\n blockLength\n };\n}\nasync function readBlock(reader) {\n const {cid, blockLength} = await readBlockHead(reader);\n const bytes = await reader.exactly(blockLength);\n reader.seek(blockLength);\n return {\n bytes,\n cid\n };\n}\nasync function readBlockIndex(reader) {\n const offset = reader.pos;\n const {cid, length, blockLength} = await readBlockHead(reader);\n const index = {\n cid,\n length,\n blockLength,\n offset,\n blockOffset: reader.pos\n };\n reader.seek(index.blockLength);\n return index;\n}\nfunction createDecoder(reader) {\n const headerPromise = readHeader(reader);\n return {\n header: () => headerPromise,\n async *blocks() {\n await headerPromise;\n while ((await reader.upTo(8)).length > 0) {\n yield await readBlock(reader);\n }\n },\n async *blocksIndex() {\n await headerPromise;\n while ((await 
reader.upTo(8)).length > 0) {\n yield await readBlockIndex(reader);\n }\n }\n };\n}\nfunction bytesReader(bytes) {\n let pos = 0;\n return {\n async upTo(length) {\n return bytes.subarray(pos, pos + Math.min(length, bytes.length - pos));\n },\n async exactly(length) {\n if (length > bytes.length - pos) {\n throw new Error('Unexpected end of data');\n }\n return bytes.subarray(pos, pos + length);\n },\n seek(length) {\n pos += length;\n },\n get pos() {\n return pos;\n }\n };\n}\nfunction chunkReader(readChunk) {\n let pos = 0;\n let have = 0;\n let offset = 0;\n let currentChunk = new Uint8Array(0);\n const read = async length => {\n have = currentChunk.length - offset;\n const bufa = [currentChunk.subarray(offset)];\n while (have < length) {\n const chunk = await readChunk();\n if (chunk.length === 0) {\n break;\n }\n if (have < 0) {\n if (chunk.length > have) {\n bufa.push(chunk.subarray(-have));\n }\n } else {\n bufa.push(chunk);\n }\n have += chunk.length;\n }\n currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0));\n let off = 0;\n for (const b of bufa) {\n currentChunk.set(b, off);\n off += b.length;\n }\n offset = 0;\n };\n return {\n async upTo(length) {\n if (currentChunk.length - offset < length) {\n await read(length);\n }\n return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length));\n },\n async exactly(length) {\n if (currentChunk.length - offset < length) {\n await read(length);\n }\n if (currentChunk.length - offset < length) {\n throw new Error('Unexpected end of data');\n }\n return currentChunk.subarray(offset, offset + length);\n },\n seek(length) {\n pos += length;\n offset += length;\n },\n get pos() {\n return pos;\n }\n };\n}\nfunction asyncIterableReader(asyncIterable) {\n const iterator = asyncIterable[Symbol.asyncIterator]();\n async function readChunk() {\n const next = await iterator.next();\n if (next.done) {\n return new Uint8Array(0);\n }\n return next.value;\n }\n return chunkReader(readChunk);\n}\n\nexports.asyncIterableReader = asyncIterableReader;\nexports.bytesReader = bytesReader;\nexports.chunkReader = chunkReader;\nexports.createDecoder = createDecoder;\nexports.readHeader = readHeader;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar varint = require('varint');\nvar dagCbor = require('@ipld/dag-cbor');\n\nfunction _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
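// Usage sketch (not part of the bundle): the three reader constructors above
// share one contract: upTo() returns at most n bytes, exactly() returns n
// bytes or throws, and neither consumes input until seek() advances pos.
// A self-contained demonstration using the bytesReader defined above:
async function exampleReader() {
  const reader = bytesReader(new Uint8Array([1, 2, 3, 4, 5]));
  console.log((await reader.upTo(8)).length); // -> 5 (capped at what is left)
  const head = await reader.exactly(2);       // -> Uint8Array [1, 2]
  reader.seek(2);                             // only now is pos advanced
  console.log(reader.pos);                    // -> 2
  return head;
}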
e : { 'default': e }; }\n\nvar varint__default = /*#__PURE__*/_interopDefaultLegacy(varint);\n\nfunction createHeader(roots) {\n const headerBytes = dagCbor.encode({\n version: 1,\n roots\n });\n const varintBytes = varint__default['default'].encode(headerBytes.length);\n const header = new Uint8Array(varintBytes.length + headerBytes.length);\n header.set(varintBytes, 0);\n header.set(headerBytes, varintBytes.length);\n return header;\n}\nfunction createEncoder(writer) {\n return {\n async setRoots(roots) {\n const bytes = createHeader(roots);\n await writer.write(bytes);\n },\n async writeBlock(block) {\n const {cid, bytes} = block;\n await writer.write(new Uint8Array(varint__default['default'].encode(cid.bytes.length + bytes.length)));\n await writer.write(cid.bytes);\n await writer.write(bytes);\n },\n async close() {\n return writer.end();\n }\n };\n}\n\nexports.createEncoder = createEncoder;\nexports.createHeader = createHeader;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar fs = require('fs');\nvar stream = require('stream');\nvar cid = require('multiformats/cid');\nvar indexer = require('./indexer.js');\nvar reader = require('./reader.js');\n\nfunction _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }\n\nvar fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);\n\nclass CarIndexedReader {\n constructor(version, path, roots, index, order) {\n this._version = version;\n this._path = path;\n this._roots = roots;\n this._index = index;\n this._order = order;\n this._fd = null;\n }\n get version() {\n return this._version;\n }\n async getRoots() {\n return this._roots;\n }\n async has(key) {\n return this._index.has(key.toString());\n }\n async get(key) {\n const blockIndex = this._index.get(key.toString());\n if (!blockIndex) {\n return undefined;\n }\n if (!this._fd) {\n this._fd = await fs__default['default'].promises.open(this._path, 'r');\n }\n const readIndex = {\n cid: key,\n length: 0,\n offset: 0,\n blockLength: blockIndex.blockLength,\n blockOffset: blockIndex.blockOffset\n };\n return reader.CarReader.readRaw(this._fd, readIndex);\n }\n async *blocks() {\n for (const cidStr of this._order) {\n const block = await this.get(cid.CID.parse(cidStr));\n if (!block) {\n throw new Error('Unexpected internal error');\n }\n yield block;\n }\n }\n async *cids() {\n for (const cidStr of this._order) {\n yield cid.CID.parse(cidStr);\n }\n }\n async close() {\n if (this._fd) {\n return this._fd.close();\n }\n }\n static async fromFile(path) {\n if (typeof path !== 'string') {\n throw new TypeError('fromFile() requires a file path string');\n }\n const iterable = await indexer.CarIndexer.fromIterable(stream.Readable.from(fs__default['default'].createReadStream(path)));\n const index = new Map();\n const order = [];\n for await (const {cid, blockLength, blockOffset} of iterable) {\n const cidStr = cid.toString();\n index.set(cidStr, {\n blockLength,\n blockOffset\n });\n order.push(cidStr);\n }\n return new CarIndexedReader(iterable.version, path, await iterable.getRoots(), index, order);\n }\n}\nconst __browser = false;\n\nexports.CarIndexedReader = CarIndexedReader;\nexports.__browser = __browser;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar decoder = require('./decoder.js');\n\nclass CarIndexer {\n constructor(version, roots, iterator) {\n this._version = version;\n this._roots = roots;\n this._iterator = iterator;\n }\n get version() {\n return 
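// Usage sketch (not part of the bundle): CarIndexedReader above scans a CAR
// file once to build an in-memory CID -> offset index, then serves has()/get()
// via random-access reads. Assumes the published @ipld/car package; `path`
// and `cid` are hypothetical.
async function exampleIndexedRead(path, cid) {
  const { CarIndexedReader } = require("@ipld/car");
  const reader = await CarIndexedReader.fromFile(path);
  try {
    if (await reader.has(cid)) {
      const block = await reader.get(cid); // -> { cid, bytes }
      console.log("block is " + block.bytes.length + " bytes");
    }
  } finally {
    await reader.close(); // closes the lazily opened file descriptor
  }
}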
this._version;\n }\n async getRoots() {\n return this._roots;\n }\n [Symbol.asyncIterator]() {\n return this._iterator;\n }\n static async fromBytes(bytes) {\n if (!(bytes instanceof Uint8Array)) {\n throw new TypeError('fromBytes() requires a Uint8Array');\n }\n return decodeIndexerComplete(decoder.bytesReader(bytes));\n }\n static async fromIterable(asyncIterable) {\n if (!asyncIterable || !(typeof asyncIterable[Symbol.asyncIterator] === 'function')) {\n throw new TypeError('fromIterable() requires an async iterable');\n }\n return decodeIndexerComplete(decoder.asyncIterableReader(asyncIterable));\n }\n}\nasync function decodeIndexerComplete(reader) {\n const decoder$1 = decoder.createDecoder(reader);\n const {version, roots} = await decoder$1.header();\n return new CarIndexer(version, roots, decoder$1.blocksIndex());\n}\n\nexports.CarIndexer = CarIndexer;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction noop() {\n}\nfunction create() {\n const chunkQueue = [];\n let drainer = null;\n let drainerResolver = noop;\n let ended = false;\n let outWait = null;\n let outWaitResolver = noop;\n const makeDrainer = () => {\n if (!drainer) {\n drainer = new Promise(resolve => {\n drainerResolver = () => {\n drainer = null;\n drainerResolver = noop;\n resolve();\n };\n });\n }\n return drainer;\n };\n const writer = {\n write(chunk) {\n chunkQueue.push(chunk);\n const drainer = makeDrainer();\n outWaitResolver();\n return drainer;\n },\n async end() {\n ended = true;\n const drainer = makeDrainer();\n outWaitResolver();\n return drainer;\n }\n };\n const iterator = {\n async next() {\n const chunk = chunkQueue.shift();\n if (chunk) {\n if (chunkQueue.length === 0) {\n drainerResolver();\n }\n return {\n done: false,\n value: chunk\n };\n }\n if (ended) {\n drainerResolver();\n return {\n done: true,\n value: undefined\n };\n }\n if (!outWait) {\n outWait = new Promise(resolve => {\n outWaitResolver = () => {\n outWait = null;\n outWaitResolver = noop;\n return resolve(iterator.next());\n };\n });\n }\n return outWait;\n }\n };\n return {\n writer,\n iterator\n };\n}\n\nexports.create = create;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar decoder = require('./decoder.js');\n\nclass CarIteratorBase {\n constructor(version, roots, iterable) {\n this._version = version;\n this._roots = roots;\n this._iterable = iterable;\n this._decoded = false;\n }\n get version() {\n return this._version;\n }\n async getRoots() {\n return this._roots;\n }\n}\nclass CarBlockIterator extends CarIteratorBase {\n [Symbol.asyncIterator]() {\n if (this._decoded) {\n throw new Error('Cannot decode more than once');\n }\n if (!this._iterable) {\n throw new Error('Block iterable not found');\n }\n this._decoded = true;\n return this._iterable[Symbol.asyncIterator]();\n }\n static async fromBytes(bytes) {\n const {version, roots, iterator} = await fromBytes(bytes);\n return new CarBlockIterator(version, roots, iterator);\n }\n static async fromIterable(asyncIterable) {\n const {version, roots, iterator} = await fromIterable(asyncIterable);\n return new CarBlockIterator(version, roots, iterator);\n }\n}\nclass CarCIDIterator extends CarIteratorBase {\n [Symbol.asyncIterator]() {\n if (this._decoded) {\n throw new Error('Cannot decode more than once');\n }\n if (!this._iterable) {\n throw new Error('Block iterable not found');\n }\n this._decoded = true;\n const iterable = this._iterable[Symbol.asyncIterator]();\n return {\n async next() 
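// Usage sketch (not part of the bundle): create() above wires a push-style
// writer to a pull-style async iterator; write() resolves only once the queue
// has drained, which is the backpressure the CarWriter further below relies
// on. A self-contained demonstration using the create() defined above:
async function exampleChannel() {
  const { writer, iterator } = create();
  const producer = (async () => {
    await writer.write(new Uint8Array([1]));
    await writer.write(new Uint8Array([2]));
    await writer.end();
  })();
  for await (const chunk of { [Symbol.asyncIterator]: () => iterator }) {
    console.log(chunk.length); // -> 1, then 1
  }
  await producer;
}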
{\n const next = await iterable.next();\n if (next.done) {\n return next;\n }\n return {\n done: false,\n value: next.value.cid\n };\n }\n };\n }\n static async fromBytes(bytes) {\n const {version, roots, iterator} = await fromBytes(bytes);\n return new CarCIDIterator(version, roots, iterator);\n }\n static async fromIterable(asyncIterable) {\n const {version, roots, iterator} = await fromIterable(asyncIterable);\n return new CarCIDIterator(version, roots, iterator);\n }\n}\nasync function fromBytes(bytes) {\n if (!(bytes instanceof Uint8Array)) {\n throw new TypeError('fromBytes() requires a Uint8Array');\n }\n return decodeIterator(decoder.bytesReader(bytes));\n}\nasync function fromIterable(asyncIterable) {\n if (!asyncIterable || !(typeof asyncIterable[Symbol.asyncIterator] === 'function')) {\n throw new TypeError('fromIterable() requires an async iterable');\n }\n return decodeIterator(decoder.asyncIterableReader(asyncIterable));\n}\nasync function decodeIterator(reader) {\n const decoder$1 = decoder.createDecoder(reader);\n const {version, roots} = await decoder$1.header();\n return {\n version,\n roots,\n iterator: decoder$1.blocks()\n };\n}\n\nexports.CarBlockIterator = CarBlockIterator;\nexports.CarCIDIterator = CarCIDIterator;\nexports.CarIteratorBase = CarIteratorBase;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar decoder = require('./decoder.js');\n\nclass CarReader {\n constructor(version, roots, blocks) {\n this._version = version;\n this._roots = roots;\n this._blocks = blocks;\n this._keys = blocks.map(b => b.cid.toString());\n }\n get version() {\n return this._version;\n }\n async getRoots() {\n return this._roots;\n }\n async has(key) {\n return this._keys.indexOf(key.toString()) > -1;\n }\n async get(key) {\n const index = this._keys.indexOf(key.toString());\n return index > -1 ? this._blocks[index] : undefined;\n }\n async *blocks() {\n for (const block of this._blocks) {\n yield block;\n }\n }\n async *cids() {\n for (const block of this._blocks) {\n yield block.cid;\n }\n }\n static async fromBytes(bytes) {\n if (!(bytes instanceof Uint8Array)) {\n throw new TypeError('fromBytes() requires a Uint8Array');\n }\n return decodeReaderComplete(decoder.bytesReader(bytes));\n }\n static async fromIterable(asyncIterable) {\n if (!asyncIterable || !(typeof asyncIterable[Symbol.asyncIterator] === 'function')) {\n throw new TypeError('fromIterable() requires an async iterable');\n }\n return decodeReaderComplete(decoder.asyncIterableReader(asyncIterable));\n }\n}\nasync function decodeReaderComplete(reader) {\n const decoder$1 = decoder.createDecoder(reader);\n const {version, roots} = await decoder$1.header();\n const blocks = [];\n for await (const block of decoder$1.blocks()) {\n blocks.push(block);\n }\n return new CarReader(version, roots, blocks);\n}\nconst __browser = true;\n\nexports.CarReader = CarReader;\nexports.__browser = __browser;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar fs = require('fs');\nvar util = require('util');\nvar readerBrowser = require('./reader-browser.js');\n\nfunction _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
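// Usage sketch (not part of the bundle): the in-memory CarReader above decodes
// an entire CAR into a block list up front, so it suits small archives.
// Assumes the published @ipld/car package; `carBytes` is a hypothetical
// Uint8Array holding a complete CAR.
async function exampleRead(carBytes) {
  const { CarReader } = require("@ipld/car");
  const reader = await CarReader.fromBytes(carBytes);
  const [root] = await reader.getRoots();
  return reader.get(root); // -> { cid, bytes } or undefined
}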
e : { 'default': e }; }\n\nvar fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);\n\nconst fsread = util.promisify(fs__default['default'].read);\nclass CarReader extends readerBrowser.CarReader {\n static async readRaw(fd, blockIndex) {\n const {cid, blockLength, blockOffset} = blockIndex;\n const bytes = new Uint8Array(blockLength);\n let read;\n if (typeof fd === 'number') {\n read = (await fsread(fd, bytes, 0, blockLength, blockOffset)).bytesRead;\n } else if (typeof fd === 'object' && typeof fd.read === 'function') {\n read = (await fd.read(bytes, 0, blockLength, blockOffset)).bytesRead;\n } else {\n throw new TypeError('Bad fd');\n }\n if (read !== blockLength) {\n throw new Error(`Failed to read entire block (${ read } instead of ${ blockLength })`);\n }\n return {\n cid,\n bytes\n };\n }\n}\nconst __browser = false;\n\nexports.CarReader = CarReader;\nexports.__browser = __browser;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar cid = require('multiformats/cid');\nvar encoder = require('./encoder.js');\nvar iteratorChannel = require('./iterator-channel.js');\nvar decoder = require('./decoder.js');\n\nclass CarWriter {\n constructor(roots, encoder) {\n this._encoder = encoder;\n this._mutex = encoder.setRoots(roots);\n this._ended = false;\n }\n async put(block) {\n if (!(block.bytes instanceof Uint8Array) || !block.cid) {\n throw new TypeError('Can only write {cid, bytes} objects');\n }\n if (this._ended) {\n throw new Error('Already closed');\n }\n const cid$1 = cid.CID.asCID(block.cid);\n if (!cid$1) {\n throw new TypeError('Can only write {cid, bytes} objects');\n }\n this._mutex = this._mutex.then(() => this._encoder.writeBlock({\n cid: cid$1,\n bytes: block.bytes\n }));\n return this._mutex;\n }\n async close() {\n if (this._ended) {\n throw new Error('Already closed');\n }\n await this._mutex;\n this._ended = true;\n return this._encoder.close();\n }\n static create(roots) {\n roots = toRoots(roots);\n const {encoder, iterator} = encodeWriter();\n const writer = new CarWriter(roots, encoder);\n const out = new CarWriterOut(iterator);\n return {\n writer,\n out\n };\n }\n static createAppender() {\n const {encoder, iterator} = encodeWriter();\n encoder.setRoots = () => Promise.resolve();\n const writer = new CarWriter([], encoder);\n const out = new CarWriterOut(iterator);\n return {\n writer,\n out\n };\n }\n static async updateRootsInBytes(bytes, roots) {\n const reader = decoder.bytesReader(bytes);\n await decoder.readHeader(reader);\n const newHeader = encoder.createHeader(roots);\n if (reader.pos !== newHeader.length) {\n throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${ reader.pos } bytes, new header is ${ newHeader.length } bytes)`);\n }\n bytes.set(newHeader, 0);\n return bytes;\n }\n}\nclass CarWriterOut {\n constructor(iterator) {\n this._iterator = iterator;\n }\n [Symbol.asyncIterator]() {\n if (this._iterating) {\n throw new Error('Multiple iterator not supported');\n }\n this._iterating = true;\n return this._iterator;\n }\n}\nfunction encodeWriter() {\n const iw = iteratorChannel.create();\n const {writer, iterator} = iw;\n const encoder$1 = encoder.createEncoder(writer);\n return {\n encoder: encoder$1,\n iterator\n };\n}\nfunction toRoots(roots) {\n if (roots === undefined) {\n return [];\n }\n if (!Array.isArray(roots)) {\n const cid$1 = cid.CID.asCID(roots);\n if (!cid$1) {\n throw new TypeError('roots must be a single CID or an array of CIDs');\n }\n return [cid$1];\n 
}\n const _roots = [];\n for (const root of roots) {\n const _root = cid.CID.asCID(root);\n if (!_root) {\n throw new TypeError('roots must be a single CID or an array of CIDs');\n }\n _roots.push(_root);\n }\n return _roots;\n}\nconst __browser = true;\n\nexports.CarWriter = CarWriter;\nexports.CarWriterOut = CarWriterOut;\nexports.__browser = __browser;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar fs = require('fs');\nvar util = require('util');\nvar writerBrowser = require('./writer-browser.js');\nvar decoder = require('./decoder.js');\nvar encoder = require('./encoder.js');\n\nfunction _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }\n\nvar fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);\n\nconst fsread = util.promisify(fs__default['default'].read);\nconst fswrite = util.promisify(fs__default['default'].write);\nclass CarWriter extends writerBrowser.CarWriter {\n static async updateRootsInFile(fd, roots) {\n const chunkSize = 256;\n let bytes;\n let offset = 0;\n let readChunk;\n if (typeof fd === 'number') {\n readChunk = async () => (await fsread(fd, bytes, 0, chunkSize, offset)).bytesRead;\n } else if (typeof fd === 'object' && typeof fd.read === 'function') {\n readChunk = async () => (await fd.read(bytes, 0, chunkSize, offset)).bytesRead;\n } else {\n throw new TypeError('Bad fd');\n }\n const fdReader = decoder.chunkReader(async () => {\n bytes = new Uint8Array(chunkSize);\n const read = await readChunk();\n offset += read;\n return read < chunkSize ? bytes.subarray(0, read) : bytes;\n });\n await decoder.readHeader(fdReader);\n const newHeader = encoder.createHeader(roots);\n if (fdReader.pos !== newHeader.length) {\n throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${ fdReader.pos } bytes, new header is ${ newHeader.length } bytes)`);\n }\n if (typeof fd === 'number') {\n await fswrite(fd, newHeader, 0, newHeader.length, 0);\n } else if (typeof fd === 'object' && typeof fd.read === 'function') {\n await fd.write(newHeader, 0, newHeader.length, 0);\n }\n }\n}\nconst __browser = false;\n\nexports.CarWriter = CarWriter;\nexports.__browser = __browser;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar cborg = require('cborg');\nvar cid = require('multiformats/cid');\n\nfunction _interopNamespace(e) {\n if (e && e.__esModule) return e;\n var n = Object.create(null);\n if (e) {\n Object.keys(e).forEach(function (k) {\n if (k !== 'default') {\n var d = Object.getOwnPropertyDescriptor(e, k);\n Object.defineProperty(n, k, d.get ? 
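// Usage sketch (not part of the bundle): CarWriter above pairs a `writer`
// handle with an async-iterable `out` side that must be drained concurrently,
// since put() only resolves as the underlying channel empties. Assumes the
// published @ipld/car package; `root` (a CID) and `blocks` (an array of
// { cid, bytes } objects) are hypothetical inputs.
async function exampleWrite(root, blocks) {
  const { CarWriter } = require("@ipld/car");
  const { writer, out } = CarWriter.create([root]);
  const chunks = [];
  const drained = (async () => {
    for await (const chunk of out) chunks.push(chunk);
  })();
  for (const block of blocks) {
    await writer.put(block);
  }
  await writer.close();
  await drained;
  return Buffer.concat(chunks); // the complete CAR as one buffer
}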
d : {\n enumerable: true,\n get: function () {\n return e[k];\n }\n });\n }\n });\n }\n n['default'] = e;\n return Object.freeze(n);\n}\n\nvar cborg__namespace = /*#__PURE__*/_interopNamespace(cborg);\n\nconst CID_CBOR_TAG = 42;\nfunction cidEncoder(obj) {\n if (obj.asCID !== obj) {\n return null;\n }\n const cid$1 = cid.CID.asCID(obj);\n if (!cid$1) {\n return null;\n }\n const bytes = new Uint8Array(cid$1.bytes.byteLength + 1);\n bytes.set(cid$1.bytes, 1);\n return [\n new cborg__namespace.Token(cborg__namespace.Type.tag, CID_CBOR_TAG),\n new cborg__namespace.Token(cborg__namespace.Type.bytes, bytes)\n ];\n}\nfunction undefinedEncoder() {\n throw new Error('`undefined` is not supported by the IPLD Data Model and cannot be encoded');\n}\nfunction numberEncoder(num) {\n if (Number.isNaN(num)) {\n throw new Error('`NaN` is not supported by the IPLD Data Model and cannot be encoded');\n }\n if (num === Infinity || num === -Infinity) {\n throw new Error('`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded');\n }\n return null;\n}\nconst encodeOptions = {\n float64: true,\n typeEncoders: {\n Object: cidEncoder,\n undefined: undefinedEncoder,\n number: numberEncoder\n }\n};\nfunction cidDecoder(bytes) {\n if (bytes[0] !== 0) {\n throw new Error('Invalid CID for CBOR tag 42; expected leading 0x00');\n }\n return cid.CID.decode(bytes.subarray(1));\n}\nconst decodeOptions = {\n allowIndefinite: false,\n allowUndefined: false,\n allowNaN: false,\n allowInfinity: false,\n allowBigInt: true,\n strict: true,\n useMaps: false,\n tags: []\n};\ndecodeOptions.tags[CID_CBOR_TAG] = cidDecoder;\nconst name = 'dag-cbor';\nconst code = 113;\nconst encode = node => cborg__namespace.encode(node, encodeOptions);\nconst decode = data => cborg__namespace.decode(data, decodeOptions);\n\nexports.code = code;\nexports.decode = decode;\nexports.encode = encode;\nexports.name = name;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar cid = require('multiformats/cid');\nvar pbDecode = require('./pb-decode.js');\nvar pbEncode = require('./pb-encode.js');\nvar util = require('./util.js');\n\nconst name = 'dag-pb';\nconst code = 112;\nfunction encode(node) {\n util.validate(node);\n const pbn = {};\n if (node.Links) {\n pbn.Links = node.Links.map(l => {\n const link = {};\n if (l.Hash) {\n link.Hash = l.Hash.bytes;\n }\n if (l.Name !== undefined) {\n link.Name = l.Name;\n }\n if (l.Tsize !== undefined) {\n link.Tsize = l.Tsize;\n }\n return link;\n });\n }\n if (node.Data) {\n pbn.Data = node.Data;\n }\n return pbEncode.encodeNode(pbn);\n}\nfunction decode(bytes) {\n const pbn = pbDecode.decodeNode(bytes);\n const node = {};\n if (pbn.Data) {\n node.Data = pbn.Data;\n }\n if (pbn.Links) {\n node.Links = pbn.Links.map(l => {\n const link = {};\n try {\n link.Hash = cid.CID.decode(l.Hash);\n } catch (e) {\n }\n if (!link.Hash) {\n throw new Error('Invalid Hash field found in link, expected CID');\n }\n if (l.Name !== undefined) {\n link.Name = l.Name;\n }\n if (l.Tsize !== undefined) {\n link.Tsize = l.Tsize;\n }\n return link;\n });\n }\n return node;\n}\n\nexports.createLink = util.createLink;\nexports.createNode = util.createNode;\nexports.prepare = util.prepare;\nexports.validate = util.validate;\nexports.code = code;\nexports.decode = decode;\nexports.encode = encode;\nexports.name = name;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst textDecoder = new TextDecoder();\nfunction decodeVarint(bytes, 
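// Usage sketch (not part of the bundle): the DAG-CBOR codec above encodes CID
// links as CBOR tag 42 over the CID bytes behind a leading 0x00 identity
// prefix, and refuses undefined/NaN/Infinity. A round trip, assuming the
// published @ipld/dag-cbor and multiformats packages:
function exampleDagCbor() {
  const dagCbor = require("@ipld/dag-cbor");
  const { CID } = require("multiformats/cid");
  const { identity } = require("multiformats/hashes/identity");
  // An identity-hashed CID keeps the example free of async hashing.
  const link = CID.createV1(0x55 /* raw */, identity.digest(new Uint8Array([1])));
  const bytes = dagCbor.encode({ hello: "world", link });
  const node = dagCbor.decode(bytes);
  console.log(node.link.equals(link)); // -> true: tag 42 round-trips to a CID
}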
offset) {\n let v = 0;\n for (let shift = 0;; shift += 7) {\n if (shift >= 64) {\n throw new Error('protobuf: varint overflow');\n }\n if (offset >= bytes.length) {\n throw new Error('protobuf: unexpected end of data');\n }\n const b = bytes[offset++];\n v += shift < 28 ? (b & 127) << shift : (b & 127) * 2 ** shift;\n if (b < 128) {\n break;\n }\n }\n return [\n v,\n offset\n ];\n}\nfunction decodeBytes(bytes, offset) {\n let byteLen;\n [byteLen, offset] = decodeVarint(bytes, offset);\n const postOffset = offset + byteLen;\n if (byteLen < 0 || postOffset < 0) {\n throw new Error('protobuf: invalid length');\n }\n if (postOffset > bytes.length) {\n throw new Error('protobuf: unexpected end of data');\n }\n return [\n bytes.subarray(offset, postOffset),\n postOffset\n ];\n}\nfunction decodeKey(bytes, index) {\n let wire;\n [wire, index] = decodeVarint(bytes, index);\n return [\n wire & 7,\n wire >> 3,\n index\n ];\n}\nfunction decodeLink(bytes) {\n const link = {};\n const l = bytes.length;\n let index = 0;\n while (index < l) {\n let wireType, fieldNum;\n [wireType, fieldNum, index] = decodeKey(bytes, index);\n if (fieldNum === 1) {\n if (link.Hash) {\n throw new Error('protobuf: (PBLink) duplicate Hash section');\n }\n if (wireType !== 2) {\n throw new Error(`protobuf: (PBLink) wrong wireType (${ wireType }) for Hash`);\n }\n if (link.Name !== undefined) {\n throw new Error('protobuf: (PBLink) invalid order, found Name before Hash');\n }\n if (link.Tsize !== undefined) {\n throw new Error('protobuf: (PBLink) invalid order, found Tsize before Hash');\n }\n ;\n [link.Hash, index] = decodeBytes(bytes, index);\n } else if (fieldNum === 2) {\n if (link.Name !== undefined) {\n throw new Error('protobuf: (PBLink) duplicate Name section');\n }\n if (wireType !== 2) {\n throw new Error(`protobuf: (PBLink) wrong wireType (${ wireType }) for Name`);\n }\n if (link.Tsize !== undefined) {\n throw new Error('protobuf: (PBLink) invalid order, found Tsize before Name');\n }\n let byts;\n [byts, index] = decodeBytes(bytes, index);\n link.Name = textDecoder.decode(byts);\n } else if (fieldNum === 3) {\n if (link.Tsize !== undefined) {\n throw new Error('protobuf: (PBLink) duplicate Tsize section');\n }\n if (wireType !== 0) {\n throw new Error(`protobuf: (PBLink) wrong wireType (${ wireType }) for Tsize`);\n }\n ;\n [link.Tsize, index] = decodeVarint(bytes, index);\n } else {\n throw new Error(`protobuf: (PBLink) invalid fieldNumber, expected 1, 2 or 3, got ${ fieldNum }`);\n }\n }\n if (index > l) {\n throw new Error('protobuf: (PBLink) unexpected end of data');\n }\n return link;\n}\nfunction decodeNode(bytes) {\n const l = bytes.length;\n let index = 0;\n let links;\n let linksBeforeData = false;\n let data;\n while (index < l) {\n let wireType, fieldNum;\n [wireType, fieldNum, index] = decodeKey(bytes, index);\n if (wireType !== 2) {\n throw new Error(`protobuf: (PBNode) invalid wireType, expected 2, got ${ wireType }`);\n }\n if (fieldNum === 1) {\n if (data) {\n throw new Error('protobuf: (PBNode) duplicate Data section');\n }\n ;\n [data, index] = decodeBytes(bytes, index);\n if (links) {\n linksBeforeData = true;\n }\n } else if (fieldNum === 2) {\n if (linksBeforeData) {\n throw new Error('protobuf: (PBNode) duplicate Links section');\n } else if (!links) {\n links = [];\n }\n let byts;\n [byts, index] = decodeBytes(bytes, index);\n links.push(decodeLink(byts));\n } else {\n throw new Error(`protobuf: (PBNode) invalid fieldNumber, expected 1 or 2, got ${ fieldNum }`);\n }\n }\n if (index > l) {\n 
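// Worked example (not part of the bundle): decodeVarint above accumulates 7
// bits per byte, least-significant group first, with 0x80 as the continuation
// bit. For the bytes [0xAC, 0x02]: 0xAC contributes 0x2C (44) and signals
// continuation, 0x02 adds 2 << 7 = 256, so the value is 300 after consuming
// 2 bytes:
function exampleVarint() {
  const [value, next] = decodeVarint(Uint8Array.of(0xac, 0x02), 0); // defined above
  console.log(value, next); // -> 300 2
}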
throw new Error('protobuf: (PBNode) unexpected end of data');\n }\n const node = {};\n if (data) {\n node.Data = data;\n }\n node.Links = links || [];\n return node;\n}\n\nexports.decodeNode = decodeNode;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst textEncoder = new TextEncoder();\nconst maxInt32 = 2 ** 32;\nconst maxUInt32 = 2 ** 31;\nfunction encodeLink(link, bytes) {\n let i = bytes.length;\n if (typeof link.Tsize === 'number') {\n if (link.Tsize < 0) {\n throw new Error('Tsize cannot be negative');\n }\n if (!Number.isSafeInteger(link.Tsize)) {\n throw new Error('Tsize too large for encoding');\n }\n i = encodeVarint(bytes, i, link.Tsize) - 1;\n bytes[i] = 24;\n }\n if (typeof link.Name === 'string') {\n const nameBytes = textEncoder.encode(link.Name);\n i -= nameBytes.length;\n bytes.set(nameBytes, i);\n i = encodeVarint(bytes, i, nameBytes.length) - 1;\n bytes[i] = 18;\n }\n if (link.Hash) {\n i -= link.Hash.length;\n bytes.set(link.Hash, i);\n i = encodeVarint(bytes, i, link.Hash.length) - 1;\n bytes[i] = 10;\n }\n return bytes.length - i;\n}\nfunction encodeNode(node) {\n const size = sizeNode(node);\n const bytes = new Uint8Array(size);\n let i = size;\n if (node.Data) {\n i -= node.Data.length;\n bytes.set(node.Data, i);\n i = encodeVarint(bytes, i, node.Data.length) - 1;\n bytes[i] = 10;\n }\n if (node.Links) {\n for (let index = node.Links.length - 1; index >= 0; index--) {\n const size = encodeLink(node.Links[index], bytes.subarray(0, i));\n i -= size;\n i = encodeVarint(bytes, i, size) - 1;\n bytes[i] = 18;\n }\n }\n return bytes;\n}\nfunction sizeLink(link) {\n let n = 0;\n if (link.Hash) {\n const l = link.Hash.length;\n n += 1 + l + sov(l);\n }\n if (typeof link.Name === 'string') {\n const l = textEncoder.encode(link.Name).length;\n n += 1 + l + sov(l);\n }\n if (typeof link.Tsize === 'number') {\n n += 1 + sov(link.Tsize);\n }\n return n;\n}\nfunction sizeNode(node) {\n let n = 0;\n if (node.Data) {\n const l = node.Data.length;\n n += 1 + l + sov(l);\n }\n if (node.Links) {\n for (const link of node.Links) {\n const l = sizeLink(link);\n n += 1 + l + sov(l);\n }\n }\n return n;\n}\nfunction encodeVarint(bytes, offset, v) {\n offset -= sov(v);\n const base = offset;\n while (v >= maxUInt32) {\n bytes[offset++] = v & 127 | 128;\n v /= 128;\n }\n while (v >= 128) {\n bytes[offset++] = v & 127 | 128;\n v >>>= 7;\n }\n bytes[offset] = v;\n return base;\n}\nfunction sov(x) {\n if (x % 2 === 0) {\n x++;\n }\n return Math.floor((len64(x) + 6) / 7);\n}\nfunction len64(x) {\n let n = 0;\n if (x >= maxInt32) {\n x = Math.floor(x / maxInt32);\n n = 32;\n }\n if (x >= 1 << 16) {\n x >>>= 16;\n n += 16;\n }\n if (x >= 1 << 8) {\n x >>>= 8;\n n += 8;\n }\n return n + len8tab[x];\n}\nconst len8tab = [\n 0,\n 1,\n 2,\n 2,\n 3,\n 3,\n 3,\n 3,\n 4,\n 4,\n 4,\n 4,\n 4,\n 4,\n 4,\n 4,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 5,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 6,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 7,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 
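// Worked example (not part of the bundle): sov() above sizes a varint from the
// value's bit length: for 300, the odd-ifying step yields 301, len64(301) is 9
// bits, and floor((9 + 6) / 7) = 2 bytes -- matching the two-byte encoding
// [0xAC, 0x02] traced through decodeVarint earlier. encodeNode() writes fields
// back to front so each length prefix can be emitted once its payload size is
// already known.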
8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8,\n 8\n];\n\nexports.encodeNode = encodeNode;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar cid = require('multiformats/cid');\n\nconst pbNodeProperties = [\n 'Data',\n 'Links'\n];\nconst pbLinkProperties = [\n 'Hash',\n 'Name',\n 'Tsize'\n];\nconst textEncoder = new TextEncoder();\nfunction linkComparator(a, b) {\n if (a === b) {\n return 0;\n }\n const abuf = a.Name ? textEncoder.encode(a.Name) : [];\n const bbuf = b.Name ? textEncoder.encode(b.Name) : [];\n let x = abuf.length;\n let y = bbuf.length;\n for (let i = 0, len = Math.min(x, y); i < len; ++i) {\n if (abuf[i] !== bbuf[i]) {\n x = abuf[i];\n y = bbuf[i];\n break;\n }\n }\n return x < y ? -1 : y < x ? 1 : 0;\n}\nfunction hasOnlyProperties(node, properties) {\n return !Object.keys(node).some(p => !properties.includes(p));\n}\nfunction asLink(link) {\n if (typeof link.asCID === 'object') {\n const Hash = cid.CID.asCID(link);\n if (!Hash) {\n throw new TypeError('Invalid DAG-PB form');\n }\n return { Hash };\n }\n if (typeof link !== 'object' || Array.isArray(link)) {\n throw new TypeError('Invalid DAG-PB form');\n }\n const pbl = {};\n if (link.Hash) {\n let cid$1 = cid.CID.asCID(link.Hash);\n try {\n if (!cid$1) {\n if (typeof link.Hash === 'string') {\n cid$1 = cid.CID.parse(link.Hash);\n } else if (link.Hash instanceof Uint8Array) {\n cid$1 = cid.CID.decode(link.Hash);\n }\n }\n } catch (e) {\n throw new TypeError(`Invalid DAG-PB form: ${ e.message }`);\n }\n if (cid$1) {\n pbl.Hash = cid$1;\n }\n }\n if (!pbl.Hash) {\n throw new TypeError('Invalid DAG-PB form');\n }\n if (typeof link.Name === 'string') {\n pbl.Name = link.Name;\n }\n if (typeof link.Tsize === 'number') {\n pbl.Tsize = link.Tsize;\n }\n return pbl;\n}\nfunction prepare(node) {\n if (node instanceof Uint8Array || typeof node === 'string') {\n node = { Data: node };\n }\n if (typeof node !== 'object' || Array.isArray(node)) {\n throw new TypeError('Invalid DAG-PB form');\n }\n const pbn = {};\n if (node.Data !== undefined) {\n if (typeof node.Data === 'string') {\n pbn.Data = textEncoder.encode(node.Data);\n } else if (node.Data instanceof Uint8Array) {\n pbn.Data = node.Data;\n } else {\n throw new TypeError('Invalid DAG-PB form');\n }\n }\n if (node.Links !== undefined) {\n if (Array.isArray(node.Links)) {\n pbn.Links = node.Links.map(asLink);\n pbn.Links.sort(linkComparator);\n } else {\n throw new TypeError('Invalid DAG-PB form');\n }\n } else {\n pbn.Links = [];\n }\n return pbn;\n}\nfunction validate(node) {\n if (!node || typeof node !== 'object' || Array.isArray(node)) {\n throw new TypeError('Invalid DAG-PB form');\n }\n if (!hasOnlyProperties(node, pbNodeProperties)) {\n throw new TypeError('Invalid DAG-PB form (extraneous properties)');\n }\n if (node.Data !== undefined && !(node.Data instanceof Uint8Array)) {\n throw new TypeError('Invalid DAG-PB form (Data must be a Uint8Array)');\n }\n if (!Array.isArray(node.Links)) {\n throw new TypeError('Invalid DAG-PB form (Links must be an 
array)');\n }\n for (let i = 0; i < node.Links.length; i++) {\n const link = node.Links[i];\n if (!link || typeof link !== 'object' || Array.isArray(link)) {\n throw new TypeError('Invalid DAG-PB form (bad link object)');\n }\n if (!hasOnlyProperties(link, pbLinkProperties)) {\n throw new TypeError('Invalid DAG-PB form (extraneous properties on link object)');\n }\n if (!link.Hash) {\n throw new TypeError('Invalid DAG-PB form (link must have a Hash)');\n }\n if (link.Hash.asCID !== link.Hash) {\n throw new TypeError('Invalid DAG-PB form (link Hash must be a CID)');\n }\n if (link.Name !== undefined && typeof link.Name !== 'string') {\n throw new TypeError('Invalid DAG-PB form (link Name must be a string)');\n }\n if (link.Tsize !== undefined && (typeof link.Tsize !== 'number' || link.Tsize % 1 !== 0)) {\n throw new TypeError('Invalid DAG-PB form (link Tsize must be an integer)');\n }\n if (i > 0 && linkComparator(link, node.Links[i - 1]) === -1) {\n throw new TypeError('Invalid DAG-PB form (links must be sorted by Name bytes)');\n }\n }\n}\nfunction createNode(data, links = []) {\n return prepare({\n Data: data,\n Links: links\n });\n}\nfunction createLink(name, size, cid) {\n return asLink({\n Hash: cid,\n Name: name,\n Tsize: size\n });\n}\n\nexports.createLink = createLink;\nexports.createNode = createNode;\nexports.prepare = prepare;\nexports.validate = validate;\n","\"use strict\";\r\nmodule.exports = asPromise;\r\n\r\n/**\r\n * Callback as used by {@link util.asPromise}.\r\n * @typedef asPromiseCallback\r\n * @type {function}\r\n * @param {Error|null} error Error, if any\r\n * @param {...*} params Additional arguments\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Returns a promise from a node-style callback function.\r\n * @memberof util\r\n * @param {asPromiseCallback} fn Function to call\r\n * @param {*} ctx Function context\r\n * @param {...*} params Function arguments\r\n * @returns {Promise<*>} Promisified function\r\n */\r\nfunction asPromise(fn, ctx/*, varargs */) {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0,\r\n index = 2,\r\n pending = true;\r\n while (index < arguments.length)\r\n params[offset++] = arguments[index++];\r\n return new Promise(function executor(resolve, reject) {\r\n params[offset] = function callback(err/*, varargs */) {\r\n if (pending) {\r\n pending = false;\r\n if (err)\r\n reject(err);\r\n else {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0;\r\n while (offset < params.length)\r\n params[offset++] = arguments[offset];\r\n resolve.apply(null, params);\r\n }\r\n }\r\n };\r\n try {\r\n fn.apply(ctx || null, params);\r\n } catch (err) {\r\n if (pending) {\r\n pending = false;\r\n reject(err);\r\n }\r\n }\r\n });\r\n}\r\n","\"use strict\";\r\n\r\n/**\r\n * A minimal base64 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar base64 = exports;\r\n\r\n/**\r\n * Calculates the byte length of a base64 encoded string.\r\n * @param {string} string Base64 encoded string\r\n * @returns {number} Byte length\r\n */\r\nbase64.length = function length(string) {\r\n var p = string.length;\r\n if (!p)\r\n return 0;\r\n var n = 0;\r\n while (--p % 4 > 1 && string.charAt(p) === \"=\")\r\n ++n;\r\n return Math.ceil(string.length * 3) / 4 - n;\r\n};\r\n\r\n// Base64 encoding table\r\nvar b64 = new Array(64);\r\n\r\n// Base64 decoding table\r\nvar s64 = new Array(123);\r\n\r\n// 65..90, 97..122, 48..57, 43, 47\r\nfor (var i = 0; i < 64;)\r\n s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? 
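// Usage sketch (not part of the bundle): prepare()/validate() above normalise
// loose input into the strict DAG-PB form (Data as a Uint8Array, Links sorted
// by Name bytes). Assumes the published @ipld/dag-pb and multiformats
// packages; the CID construction reuses the identity-hash trick from the
// DAG-CBOR sketch earlier.
function exampleDagPb() {
  const dagPb = require("@ipld/dag-pb");
  const { CID } = require("multiformats/cid");
  const { identity } = require("multiformats/hashes/identity");
  const child = CID.createV1(0x70 /* dag-pb */, identity.digest(new Uint8Array([1])));
  const node = dagPb.prepare({
    Data: "payload", // strings are encoded to UTF-8 bytes by prepare()
    Links: [dagPb.createLink("a", 7, child)]
  });
  dagPb.validate(node); // throws on malformed nodes
  const bytes = dagPb.encode(node);
  return dagPb.decode(bytes);
}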
i - 4 : i - 59 | 43] = i++;\r\n\r\n/**\r\n * Encodes a buffer to a base64 encoded string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} Base64 encoded string\r\n */\r\nbase64.encode = function encode(buffer, start, end) {\r\n var parts = null,\r\n chunk = [];\r\n var i = 0, // output index\r\n j = 0, // goto index\r\n t; // temporary\r\n while (start < end) {\r\n var b = buffer[start++];\r\n switch (j) {\r\n case 0:\r\n chunk[i++] = b64[b >> 2];\r\n t = (b & 3) << 4;\r\n j = 1;\r\n break;\r\n case 1:\r\n chunk[i++] = b64[t | b >> 4];\r\n t = (b & 15) << 2;\r\n j = 2;\r\n break;\r\n case 2:\r\n chunk[i++] = b64[t | b >> 6];\r\n chunk[i++] = b64[b & 63];\r\n j = 0;\r\n break;\r\n }\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (j) {\r\n chunk[i++] = b64[t];\r\n chunk[i++] = 61;\r\n if (j === 1)\r\n chunk[i++] = 61;\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\nvar invalidEncoding = \"invalid encoding\";\r\n\r\n/**\r\n * Decodes a base64 encoded string to a buffer.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Number of bytes written\r\n * @throws {Error} If encoding is invalid\r\n */\r\nbase64.decode = function decode(string, buffer, offset) {\r\n var start = offset;\r\n var j = 0, // goto index\r\n t; // temporary\r\n for (var i = 0; i < string.length;) {\r\n var c = string.charCodeAt(i++);\r\n if (c === 61 && j > 1)\r\n break;\r\n if ((c = s64[c]) === undefined)\r\n throw Error(invalidEncoding);\r\n switch (j) {\r\n case 0:\r\n t = c;\r\n j = 1;\r\n break;\r\n case 1:\r\n buffer[offset++] = t << 2 | (c & 48) >> 4;\r\n t = c;\r\n j = 2;\r\n break;\r\n case 2:\r\n buffer[offset++] = (t & 15) << 4 | (c & 60) >> 2;\r\n t = c;\r\n j = 3;\r\n break;\r\n case 3:\r\n buffer[offset++] = (t & 3) << 6 | c;\r\n j = 0;\r\n break;\r\n }\r\n }\r\n if (j === 1)\r\n throw Error(invalidEncoding);\r\n return offset - start;\r\n};\r\n\r\n/**\r\n * Tests if the specified string appears to be base64 encoded.\r\n * @param {string} string String to test\r\n * @returns {boolean} `true` if probably base64 encoded, otherwise false\r\n */\r\nbase64.test = function test(string) {\r\n return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string);\r\n};\r\n","\"use strict\";\r\nmodule.exports = EventEmitter;\r\n\r\n/**\r\n * Constructs a new event emitter instance.\r\n * @classdesc A minimal event emitter.\r\n * @memberof util\r\n * @constructor\r\n */\r\nfunction EventEmitter() {\r\n\r\n /**\r\n * Registered listeners.\r\n * @type {Object.}\r\n * @private\r\n */\r\n this._listeners = {};\r\n}\r\n\r\n/**\r\n * Registers an event listener.\r\n * @param {string} evt Event name\r\n * @param {function} fn Listener\r\n * @param {*} [ctx] Listener context\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.on = function on(evt, fn, ctx) {\r\n (this._listeners[evt] || (this._listeners[evt] = [])).push({\r\n fn : fn,\r\n ctx : ctx || this\r\n });\r\n return this;\r\n};\r\n\r\n/**\r\n * Removes an event listener or any matching listeners if arguments are omitted.\r\n * @param {string} [evt] Event name. 
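// Usage sketch (not part of the bundle): the base64 helpers above work on
// plain Uint8Arrays rather than Buffers, with length() pre-sizing the decode
// target. A round trip, reusing the functions defined above:
function exampleBase64() {
  const input = new Uint8Array([104, 105, 33]); // "hi!"
  const encoded = base64.encode(input, 0, input.length); // -> "aGkh"
  const out = new Uint8Array(base64.length(encoded));    // -> 3 bytes
  base64.decode(encoded, out, 0);
  return out; // -> Uint8Array [104, 105, 33]
}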
Removes all listeners if omitted.\r\n * @param {function} [fn] Listener to remove. Removes all listeners of `evt` if omitted.\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.off = function off(evt, fn) {\r\n if (evt === undefined)\r\n this._listeners = {};\r\n else {\r\n if (fn === undefined)\r\n this._listeners[evt] = [];\r\n else {\r\n var listeners = this._listeners[evt];\r\n for (var i = 0; i < listeners.length;)\r\n if (listeners[i].fn === fn)\r\n listeners.splice(i, 1);\r\n else\r\n ++i;\r\n }\r\n }\r\n return this;\r\n};\r\n\r\n/**\r\n * Emits an event by calling its listeners with the specified arguments.\r\n * @param {string} evt Event name\r\n * @param {...*} args Arguments\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.emit = function emit(evt) {\r\n var listeners = this._listeners[evt];\r\n if (listeners) {\r\n var args = [],\r\n i = 1;\r\n for (; i < arguments.length;)\r\n args.push(arguments[i++]);\r\n for (i = 0; i < listeners.length;)\r\n listeners[i].fn.apply(listeners[i++].ctx, args);\r\n }\r\n return this;\r\n};\r\n","\"use strict\";\r\n\r\nmodule.exports = factory(factory);\r\n\r\n/**\r\n * Reads / writes floats / doubles from / to buffers.\r\n * @name util.float\r\n * @namespace\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using little endian byte order.\r\n * @name util.float.writeFloatLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using big endian byte order.\r\n * @name util.float.writeFloatBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using little endian byte order.\r\n * @name util.float.readFloatLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using big endian byte order.\r\n * @name util.float.readFloatBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using little endian byte order.\r\n * @name util.float.writeDoubleLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using big endian byte order.\r\n * @name util.float.writeDoubleBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using little endian byte order.\r\n * @name util.float.readDoubleLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using big endian byte order.\r\n * @name util.float.readDoubleBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n 
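// Usage sketch (not part of the bundle): the minimal EventEmitter above
// supports on/off/emit with an optional listener context. Reusing the
// constructor defined above:
function exampleEmitter() {
  const ee = new EventEmitter();
  function onMessage(text) {
    console.log("got: " + text);
  }
  ee.on("message", onMessage);
  ee.emit("message", "hello"); // -> "got: hello"
  ee.off("message", onMessage); // removes just this listener
}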
*/\r\n\r\n// Factory function for the purpose of node-based testing in modified global environments\r\nfunction factory(exports) {\r\n\r\n // float: typed array\r\n if (typeof Float32Array !== \"undefined\") (function() {\r\n\r\n var f32 = new Float32Array([ -0 ]),\r\n f8b = new Uint8Array(f32.buffer),\r\n le = f8b[3] === 128;\r\n\r\n function writeFloat_f32_cpy(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n }\r\n\r\n function writeFloat_f32_rev(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[3];\r\n buf[pos + 1] = f8b[2];\r\n buf[pos + 2] = f8b[1];\r\n buf[pos + 3] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy;\r\n\r\n function readFloat_f32_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n function readFloat_f32_rev(buf, pos) {\r\n f8b[3] = buf[pos ];\r\n f8b[2] = buf[pos + 1];\r\n f8b[1] = buf[pos + 2];\r\n f8b[0] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.readFloatBE = le ? readFloat_f32_rev : readFloat_f32_cpy;\r\n\r\n // float: ieee754\r\n })(); else (function() {\r\n\r\n function writeFloat_ieee754(writeUint, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0)\r\n writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos);\r\n else if (isNaN(val))\r\n writeUint(2143289344, buf, pos);\r\n else if (val > 3.4028234663852886e+38) // +-Infinity\r\n writeUint((sign << 31 | 2139095040) >>> 0, buf, pos);\r\n else if (val < 1.1754943508222875e-38) // denormal\r\n writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos);\r\n else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2),\r\n mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607;\r\n writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos);\r\n }\r\n }\r\n\r\n exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE);\r\n exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE);\r\n\r\n function readFloat_ieee754(readUint, buf, pos) {\r\n var uint = readUint(buf, pos),\r\n sign = (uint >> 31) * 2 + 1,\r\n exponent = uint >>> 23 & 255,\r\n mantissa = uint & 8388607;\r\n return exponent === 255\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? 
sign * 1.401298464324817e-45 * mantissa\r\n : sign * Math.pow(2, exponent - 150) * (mantissa + 8388608);\r\n }\r\n\r\n exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE);\r\n exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE);\r\n\r\n })();\r\n\r\n // double: typed array\r\n if (typeof Float64Array !== \"undefined\") (function() {\r\n\r\n var f64 = new Float64Array([-0]),\r\n f8b = new Uint8Array(f64.buffer),\r\n le = f8b[7] === 128;\r\n\r\n function writeDouble_f64_cpy(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n buf[pos + 4] = f8b[4];\r\n buf[pos + 5] = f8b[5];\r\n buf[pos + 6] = f8b[6];\r\n buf[pos + 7] = f8b[7];\r\n }\r\n\r\n function writeDouble_f64_rev(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[7];\r\n buf[pos + 1] = f8b[6];\r\n buf[pos + 2] = f8b[5];\r\n buf[pos + 3] = f8b[4];\r\n buf[pos + 4] = f8b[3];\r\n buf[pos + 5] = f8b[2];\r\n buf[pos + 6] = f8b[1];\r\n buf[pos + 7] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy;\r\n\r\n function readDouble_f64_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n f8b[4] = buf[pos + 4];\r\n f8b[5] = buf[pos + 5];\r\n f8b[6] = buf[pos + 6];\r\n f8b[7] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n function readDouble_f64_rev(buf, pos) {\r\n f8b[7] = buf[pos ];\r\n f8b[6] = buf[pos + 1];\r\n f8b[5] = buf[pos + 2];\r\n f8b[4] = buf[pos + 3];\r\n f8b[3] = buf[pos + 4];\r\n f8b[2] = buf[pos + 5];\r\n f8b[1] = buf[pos + 6];\r\n f8b[0] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy;\r\n\r\n // double: ieee754\r\n })(); else (function() {\r\n\r\n function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(1 / val > 0 ? 
/* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1);\r\n } else if (isNaN(val)) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(2146959360, buf, pos + off1);\r\n } else if (val > 1.7976931348623157e+308) { // +-Infinity\r\n writeUint(0, buf, pos + off0);\r\n writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1);\r\n } else {\r\n var mantissa;\r\n if (val < 2.2250738585072014e-308) { // denormal\r\n mantissa = val / 5e-324;\r\n writeUint(mantissa >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1);\r\n } else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2);\r\n if (exponent === 1024)\r\n exponent = 1023;\r\n mantissa = val * Math.pow(2, -exponent);\r\n writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1);\r\n }\r\n }\r\n }\r\n\r\n exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4);\r\n exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0);\r\n\r\n function readDouble_ieee754(readUint, off0, off1, buf, pos) {\r\n var lo = readUint(buf, pos + off0),\r\n hi = readUint(buf, pos + off1);\r\n var sign = (hi >> 31) * 2 + 1,\r\n exponent = hi >>> 20 & 2047,\r\n mantissa = 4294967296 * (hi & 1048575) + lo;\r\n return exponent === 2047\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? sign * 5e-324 * mantissa\r\n : sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496);\r\n }\r\n\r\n exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4);\r\n exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0);\r\n\r\n })();\r\n\r\n return exports;\r\n}\r\n\r\n// uint helpers\r\n\r\nfunction writeUintLE(val, buf, pos) {\r\n buf[pos ] = val & 255;\r\n buf[pos + 1] = val >>> 8 & 255;\r\n buf[pos + 2] = val >>> 16 & 255;\r\n buf[pos + 3] = val >>> 24;\r\n}\r\n\r\nfunction writeUintBE(val, buf, pos) {\r\n buf[pos ] = val >>> 24;\r\n buf[pos + 1] = val >>> 16 & 255;\r\n buf[pos + 2] = val >>> 8 & 255;\r\n buf[pos + 3] = val & 255;\r\n}\r\n\r\nfunction readUintLE(buf, pos) {\r\n return (buf[pos ]\r\n | buf[pos + 1] << 8\r\n | buf[pos + 2] << 16\r\n | buf[pos + 3] << 24) >>> 0;\r\n}\r\n\r\nfunction readUintBE(buf, pos) {\r\n return (buf[pos ] << 24\r\n | buf[pos + 1] << 16\r\n | buf[pos + 2] << 8\r\n | buf[pos + 3]) >>> 0;\r\n}\r\n","\"use strict\";\r\nmodule.exports = inquire;\r\n\r\n/**\r\n * Requires a module only if available.\r\n * @memberof util\r\n * @param {string} moduleName Module to require\r\n * @returns {?Object} Required module if available and not empty, otherwise `null`\r\n */\r\nfunction inquire(moduleName) {\r\n try {\r\n var mod = eval(\"quire\".replace(/^/,\"re\"))(moduleName); // eslint-disable-line no-eval\r\n if (mod && (mod.length || Object.keys(mod).length))\r\n return mod;\r\n } catch (e) {} // eslint-disable-line no-empty\r\n return null;\r\n}\r\n","\"use strict\";\r\nmodule.exports = pool;\r\n\r\n/**\r\n * An allocator as used by {@link util.pool}.\r\n * @typedef PoolAllocator\r\n * @type {function}\r\n * @param {number} size Buffer size\r\n * @returns {Uint8Array} Buffer\r\n */\r\n\r\n/**\r\n * A slicer as used by {@link util.pool}.\r\n * @typedef PoolSlicer\r\n * @type {function}\r\n * @param {number} start Start offset\r\n * @param {number} end End offset\r\n * @returns {Uint8Array} Buffer slice\r\n * @this {Uint8Array}\r\n */\r\n\r\n/**\r\n * A general purpose buffer 
pool.\r\n * @memberof util\r\n * @function\r\n * @param {PoolAllocator} alloc Allocator\r\n * @param {PoolSlicer} slice Slicer\r\n * @param {number} [size=8192] Slab size\r\n * @returns {PoolAllocator} Pooled allocator\r\n */\r\nfunction pool(alloc, slice, size) {\r\n var SIZE = size || 8192;\r\n var MAX = SIZE >>> 1;\r\n var slab = null;\r\n var offset = SIZE;\r\n return function pool_alloc(size) {\r\n if (size < 1 || size > MAX)\r\n return alloc(size);\r\n if (offset + size > SIZE) {\r\n slab = alloc(SIZE);\r\n offset = 0;\r\n }\r\n var buf = slice.call(slab, offset, offset += size);\r\n if (offset & 7) // align to 32 bit\r\n offset = (offset | 7) + 1;\r\n return buf;\r\n };\r\n}\r\n","\"use strict\";\r\n\r\n/**\r\n * A minimal UTF8 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar utf8 = exports;\r\n\r\n/**\r\n * Calculates the UTF8 byte length of a string.\r\n * @param {string} string String\r\n * @returns {number} Byte length\r\n */\r\nutf8.length = function utf8_length(string) {\r\n var len = 0,\r\n c = 0;\r\n for (var i = 0; i < string.length; ++i) {\r\n c = string.charCodeAt(i);\r\n if (c < 128)\r\n len += 1;\r\n else if (c < 2048)\r\n len += 2;\r\n else if ((c & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) {\r\n ++i;\r\n len += 4;\r\n } else\r\n len += 3;\r\n }\r\n return len;\r\n};\r\n\r\n/**\r\n * Reads UTF8 bytes as a string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} String read\r\n */\r\nutf8.read = function utf8_read(buffer, start, end) {\r\n var len = end - start;\r\n if (len < 1)\r\n return \"\";\r\n var parts = null,\r\n chunk = [],\r\n i = 0, // char offset\r\n t; // temporary\r\n while (start < end) {\r\n t = buffer[start++];\r\n if (t < 128)\r\n chunk[i++] = t;\r\n else if (t > 191 && t < 224)\r\n chunk[i++] = (t & 31) << 6 | buffer[start++] & 63;\r\n else if (t > 239 && t < 365) {\r\n t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000;\r\n chunk[i++] = 0xD800 + (t >> 10);\r\n chunk[i++] = 0xDC00 + (t & 1023);\r\n } else\r\n chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63;\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\n/**\r\n * Writes a string as UTF8 bytes.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Bytes written\r\n */\r\nutf8.write = function utf8_write(string, buffer, offset) {\r\n var start = offset,\r\n c1, // character 1\r\n c2; // character 2\r\n for (var i = 0; i < string.length; ++i) {\r\n c1 = string.charCodeAt(i);\r\n if (c1 < 128) {\r\n buffer[offset++] = c1;\r\n } else if (c1 < 2048) {\r\n buffer[offset++] = c1 >> 6 | 192;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) {\r\n c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF);\r\n ++i;\r\n buffer[offset++] = c1 >> 18 | 240;\r\n buffer[offset++] = c1 >> 12 & 63 | 128;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else 
{\r\n buffer[offset++] = c1 >> 12 | 224;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n }\r\n }\r\n return offset - start;\r\n};\r\n","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n if(a===b) {\n return [ai, bi];\n }\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","/* eslint-env browser */\n\n'use strict'\n\nconst browserReadableStreamToIt = require('browser-readablestream-to-it')\n\n/**\n * @param {Blob} blob\n * @returns {AsyncIterable}\n */\nfunction blobToIt (blob) {\n if (typeof blob.stream === 'function') {\n return browserReadableStreamToIt(blob.stream())\n }\n\n // firefox < 69 does not support blob.stream()\n // @ts-ignore - response.body is optional, but in practice it's a stream.\n return browserReadableStreamToIt(new Response(blob).body)\n}\n\nmodule.exports = blobToIt\n","var concatMap = require('concat-map');\nvar balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction identity(e) {\n return e;\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m || /\\$$/.test(m.pre)) return [str];\n\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = concatMap(n, function(el) { return expand(el, false) });\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n\n return expansions;\n}\n\n","'use strict'\n\n/**\n * Turns a browser readable stream into an async iterable. Async iteration over\n * returned iterable will lock give stream, preventing any other consumer from\n * acquiring a reader. The lock will be released if iteration loop is broken. To\n * prevent stream cancelling optional `{ preventCancel: true }` could be passed\n * as a second argument.\n * @template T\n * @param {ReadableStream} stream\n * @param {Object} [options]\n * @param {boolean} [options.preventCancel=boolean]\n * @returns {AsyncIterable}\n */\nasync function * browserReadableStreamToIt (stream, options = {}) {\n const reader = stream.getReader()\n\n try {\n while (true) {\n const result = await reader.read()\n\n if (result.done) {\n return\n }\n\n yield result.value\n }\n } finally {\n if (options.preventCancel !== true) {\n reader.cancel()\n }\n\n reader.releaseLock()\n }\n}\n\nmodule.exports = browserReadableStreamToIt\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar splitter = require('./splitter.js');\nvar joiner = require('./joiner.js');\n\n\n\nexports.TreewalkCarSplitter = splitter.TreewalkCarSplitter;\nexports.TreewalkCarJoiner = joiner.TreewalkCarJoiner;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar car = require('@ipld/car');\n\nclass TreewalkCarJoiner {\n constructor(cars) {\n this._cars = Array.from(cars);\n if (!this._cars.length)\n throw new Error('missing CARs');\n }\n async *car() {\n const reader = this._cars[0];\n const roots = await reader.getRoots();\n const {writer, out} = car.CarWriter.create(roots);\n const writeCar = async () => {\n const written = new Set();\n const writeBlocks = async reader => {\n for await (const b of reader.blocks()) {\n if (written.has(b.cid.toString()))\n continue;\n await writer.put(b);\n written.add(b.cid.toString());\n }\n };\n try {\n await writeBlocks(reader);\n for (const reader of this._cars.slice(1)) {\n await writeBlocks(reader);\n }\n } catch (err) {\n console.error(err);\n } finally {\n await writer.close();\n }\n };\n writeCar();\n yield* out;\n }\n}\n\nexports.TreewalkCarJoiner = TreewalkCarJoiner;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar car = require('@ipld/car');\nvar block = require('multiformats/block');\nvar raw = require('multiformats/codecs/raw');\nvar dagCbor = require('@ipld/dag-cbor');\nvar pb = require('@ipld/dag-pb');\n\nfunction _interopNamespace(e) {\n if (e && e.__esModule) return e;\n var n = Object.create(null);\n if (e) {\n Object.keys(e).forEach(function (k) {\n if (k !== 'default') {\n var d = Object.getOwnPropertyDescriptor(e, k);\n Object.defineProperty(n, k, d.get 
? d : {\n enumerable: true,\n get: function () {\n return e[k];\n }\n });\n }\n });\n }\n n['default'] = e;\n return Object.freeze(n);\n}\n\nvar raw__namespace = /*#__PURE__*/_interopNamespace(raw);\nvar dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);\nvar pb__namespace = /*#__PURE__*/_interopNamespace(pb);\n\nclass TreewalkCarSplitter {\n constructor(reader, targetSize, options = {}) {\n if (typeof targetSize !== 'number' || targetSize <= 0) {\n throw new Error('invalid target chunk size');\n }\n this._reader = reader;\n this._targetSize = targetSize;\n this._decoders = [\n pb__namespace,\n raw__namespace,\n dagCbor__namespace,\n ...options.decoders || []\n ];\n }\n async *cars() {\n const roots = await this._reader.getRoots();\n if (roots.length !== 1)\n throw new Error(`unexpected number of roots: ${ roots.length }`);\n let channel;\n for await (const val of this._cars(roots[0])) {\n channel = val.channel;\n if (val.out)\n yield val.out;\n }\n if (!channel) {\n throw new Error('missing CAR writer channel');\n }\n channel.writer.close();\n yield channel.out;\n }\n async _get(cid) {\n const rawBlock = await this._reader.get(cid);\n if (!rawBlock)\n throw new Error(`missing block for ${ cid }`);\n const {bytes} = rawBlock;\n const decoder = this._decoders.find(d => d.code === cid.code);\n if (!decoder)\n throw new Error(`missing decoder for ${ cid.code }`);\n return new block.Block({\n cid,\n bytes,\n value: decoder.decode(bytes)\n });\n }\n async *_cars(cid, parents = [], channel = undefined) {\n const block = await this._get(cid);\n channel = channel || Object.assign(car.CarWriter.create(cid), { size: 0 });\n if (channel.size > 0 && channel.size + block.bytes.byteLength >= this._targetSize) {\n channel.writer.close();\n const {out} = channel;\n channel = newCar(parents);\n yield {\n channel,\n out\n };\n }\n parents = parents.concat(block);\n channel.size += block.bytes.byteLength;\n channel.writer.put(block);\n for (const [, cid] of block.links()) {\n for await (const val of this._cars(cid, parents, channel)) {\n channel = val.channel;\n yield val;\n }\n }\n if (!channel) {\n throw new Error('missing CAR writer channel');\n }\n yield { channel };\n }\n static async fromIterable(iterable, targetSize, options) {\n const reader = await car.CarReader.fromIterable(iterable);\n return new TreewalkCarSplitter(reader, targetSize, options);\n }\n static async fromBlob(blob, targetSize, options) {\n const buffer = await blob.arrayBuffer();\n const reader = await car.CarReader.fromBytes(new Uint8Array(buffer));\n return new TreewalkCarSplitter(reader, targetSize, options);\n }\n}\nfunction newCar(parents) {\n const ch = Object.assign(car.CarWriter.create(parents[0].cid), { size: parents.reduce((size, b) => size + b.bytes.byteLength, 0) });\n for (const b of parents) {\n ch.writer.put(b);\n }\n return ch;\n}\n\nexports.TreewalkCarSplitter = TreewalkCarSplitter;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar encode = require('./lib/encode.js');\nvar decode = require('./lib/decode.js');\nvar token = require('./lib/token.js');\n\n\n\nexports.encode = encode.encode;\nexports.decode = decode.decode;\nexports.Token = token.Token;\nexports.Type = token.Type;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar common = require('./common.js');\n\nconst uintBoundaries = [\n 24,\n 256,\n 65536,\n 4294967296,\n BigInt('18446744073709551616')\n];\nfunction readUint8(data, offset, 
options) {\n common.assertEnoughData(data, offset, 1);\n const value = data[offset];\n if (options.strict === true && value < uintBoundaries[0]) {\n throw new Error(`${ common.decodeErrPrefix } integer encoded in more bytes than necessary (strict decode)`);\n }\n return value;\n}\nfunction readUint16(data, offset, options) {\n common.assertEnoughData(data, offset, 2);\n const value = data[offset] << 8 | data[offset + 1];\n if (options.strict === true && value < uintBoundaries[1]) {\n throw new Error(`${ common.decodeErrPrefix } integer encoded in more bytes than necessary (strict decode)`);\n }\n return value;\n}\nfunction readUint32(data, offset, options) {\n common.assertEnoughData(data, offset, 4);\n const value = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];\n if (options.strict === true && value < uintBoundaries[2]) {\n throw new Error(`${ common.decodeErrPrefix } integer encoded in more bytes than necessary (strict decode)`);\n }\n return value;\n}\nfunction readUint64(data, offset, options) {\n common.assertEnoughData(data, offset, 8);\n const hi = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];\n const lo = data[offset + 4] * 16777216 + (data[offset + 5] << 16) + (data[offset + 6] << 8) + data[offset + 7];\n const value = (BigInt(hi) << BigInt(32)) + BigInt(lo);\n if (options.strict === true && value < uintBoundaries[3]) {\n throw new Error(`${ common.decodeErrPrefix } integer encoded in more bytes than necessary (strict decode)`);\n }\n if (value <= Number.MAX_SAFE_INTEGER) {\n return Number(value);\n }\n if (options.allowBigInt === true) {\n return value;\n }\n throw new Error(`${ common.decodeErrPrefix } integers outside of the safe integer range are not supported`);\n}\nfunction decodeUint8(data, pos, _minor, options) {\n return new token.Token(token.Type.uint, readUint8(data, pos + 1, options), 2);\n}\nfunction decodeUint16(data, pos, _minor, options) {\n return new token.Token(token.Type.uint, readUint16(data, pos + 1, options), 3);\n}\nfunction decodeUint32(data, pos, _minor, options) {\n return new token.Token(token.Type.uint, readUint32(data, pos + 1, options), 5);\n}\nfunction decodeUint64(data, pos, _minor, options) {\n return new token.Token(token.Type.uint, readUint64(data, pos + 1, options), 9);\n}\nfunction encodeUint(buf, token) {\n return encodeUintValue(buf, 0, token.value);\n}\nfunction encodeUintValue(buf, major, uint) {\n if (uint < uintBoundaries[0]) {\n const nuint = Number(uint);\n buf.push([major | nuint]);\n } else if (uint < uintBoundaries[1]) {\n const nuint = Number(uint);\n buf.push([\n major | 24,\n nuint\n ]);\n } else if (uint < uintBoundaries[2]) {\n const nuint = Number(uint);\n buf.push([\n major | 25,\n nuint >>> 8,\n nuint & 255\n ]);\n } else if (uint < uintBoundaries[3]) {\n const nuint = Number(uint);\n buf.push([\n major | 26,\n nuint >>> 24 & 255,\n nuint >>> 16 & 255,\n nuint >>> 8 & 255,\n nuint & 255\n ]);\n } else {\n const buint = BigInt(uint);\n if (buint < uintBoundaries[4]) {\n const set = [\n major | 27,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0\n ];\n let lo = Number(buint & BigInt(4294967295));\n let hi = Number(buint >> BigInt(32) & BigInt(4294967295));\n set[8] = lo & 255;\n lo = lo >> 8;\n set[7] = lo & 255;\n lo = lo >> 8;\n set[6] = lo & 255;\n lo = lo >> 8;\n set[5] = lo & 255;\n set[4] = hi & 255;\n hi = hi >> 8;\n set[3] = hi & 255;\n hi = hi >> 8;\n set[2] = hi & 255;\n hi = hi >> 8;\n set[1] = hi & 255;\n buf.push(set);\n } else 
{\n throw new Error(`${ common.decodeErrPrefix } encountered BigInt larger than allowable range`);\n }\n }\n}\nencodeUint.encodedSize = function encodedSize(token) {\n return encodeUintValue.encodedSize(token.value);\n};\nencodeUintValue.encodedSize = function encodedSize(uint) {\n if (uint < uintBoundaries[0]) {\n return 1;\n }\n if (uint < uintBoundaries[1]) {\n return 2;\n }\n if (uint < uintBoundaries[2]) {\n return 3;\n }\n if (uint < uintBoundaries[3]) {\n return 5;\n }\n return 9;\n};\nencodeUint.compareTokens = function compareTokens(tok1, tok2) {\n return tok1.value < tok2.value ? -1 : tok1.value > tok2.value ? 1 : 0;\n};\n\nexports.decodeUint16 = decodeUint16;\nexports.decodeUint32 = decodeUint32;\nexports.decodeUint64 = decodeUint64;\nexports.decodeUint8 = decodeUint8;\nexports.encodeUint = encodeUint;\nexports.encodeUintValue = encodeUintValue;\nexports.readUint16 = readUint16;\nexports.readUint32 = readUint32;\nexports.readUint64 = readUint64;\nexports.readUint8 = readUint8;\nexports.uintBoundaries = uintBoundaries;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar _0uint = require('./0uint.js');\nvar common = require('./common.js');\n\nfunction decodeNegint8(data, pos, _minor, options) {\n return new token.Token(token.Type.negint, -1 - _0uint.readUint8(data, pos + 1, options), 2);\n}\nfunction decodeNegint16(data, pos, _minor, options) {\n return new token.Token(token.Type.negint, -1 - _0uint.readUint16(data, pos + 1, options), 3);\n}\nfunction decodeNegint32(data, pos, _minor, options) {\n return new token.Token(token.Type.negint, -1 - _0uint.readUint32(data, pos + 1, options), 5);\n}\nconst neg1b = BigInt(-1);\nconst pos1b = BigInt(1);\nfunction decodeNegint64(data, pos, _minor, options) {\n const int = _0uint.readUint64(data, pos + 1, options);\n if (typeof int !== 'bigint') {\n const value = -1 - int;\n if (value >= Number.MIN_SAFE_INTEGER) {\n return new token.Token(token.Type.negint, value, 9);\n }\n }\n if (options.allowBigInt !== true) {\n throw new Error(`${ common.decodeErrPrefix } integers outside of the safe integer range are not supported`);\n }\n return new token.Token(token.Type.negint, neg1b - BigInt(int), 9);\n}\nfunction encodeNegint(buf, token) {\n const negint = token.value;\n const unsigned = typeof negint === 'bigint' ? negint * neg1b - pos1b : negint * -1 - 1;\n _0uint.encodeUintValue(buf, token.type.majorEncoded, unsigned);\n}\nencodeNegint.encodedSize = function encodedSize(token) {\n const negint = token.value;\n const unsigned = typeof negint === 'bigint' ? negint * neg1b - pos1b : negint * -1 - 1;\n if (unsigned < _0uint.uintBoundaries[0]) {\n return 1;\n }\n if (unsigned < _0uint.uintBoundaries[1]) {\n return 2;\n }\n if (unsigned < _0uint.uintBoundaries[2]) {\n return 3;\n }\n if (unsigned < _0uint.uintBoundaries[3]) {\n return 5;\n }\n return 9;\n};\nencodeNegint.compareTokens = function compareTokens(tok1, tok2) {\n return tok1.value < tok2.value ? 1 : tok1.value > tok2.value ? 
-1 : 0;\n};\n\nexports.decodeNegint16 = decodeNegint16;\nexports.decodeNegint32 = decodeNegint32;\nexports.decodeNegint64 = decodeNegint64;\nexports.decodeNegint8 = decodeNegint8;\nexports.encodeNegint = encodeNegint;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar common = require('./common.js');\nvar _0uint = require('./0uint.js');\nvar byteUtils = require('./byte-utils.js');\n\nfunction toToken(data, pos, prefix, length) {\n common.assertEnoughData(data, pos, prefix + length);\n const buf = byteUtils.slice(data, pos + prefix, pos + prefix + length);\n return new token.Token(token.Type.bytes, buf, prefix + length);\n}\nfunction decodeBytesCompact(data, pos, minor, _options) {\n return toToken(data, pos, 1, minor);\n}\nfunction decodeBytes8(data, pos, _minor, options) {\n return toToken(data, pos, 2, _0uint.readUint8(data, pos + 1, options));\n}\nfunction decodeBytes16(data, pos, _minor, options) {\n return toToken(data, pos, 3, _0uint.readUint16(data, pos + 1, options));\n}\nfunction decodeBytes32(data, pos, _minor, options) {\n return toToken(data, pos, 5, _0uint.readUint32(data, pos + 1, options));\n}\nfunction decodeBytes64(data, pos, _minor, options) {\n const l = _0uint.readUint64(data, pos + 1, options);\n if (typeof l === 'bigint') {\n throw new Error(`${ common.decodeErrPrefix } 64-bit integer bytes lengths not supported`);\n }\n return toToken(data, pos, 9, l);\n}\nfunction tokenBytes(token$1) {\n if (token$1.encodedBytes === undefined) {\n token$1.encodedBytes = token$1.type === token.Type.string ? byteUtils.fromString(token$1.value) : token$1.value;\n }\n return token$1.encodedBytes;\n}\nfunction encodeBytes(buf, token) {\n const bytes = tokenBytes(token);\n _0uint.encodeUintValue(buf, token.type.majorEncoded, bytes.length);\n buf.push(bytes);\n}\nencodeBytes.encodedSize = function encodedSize(token) {\n const bytes = tokenBytes(token);\n return _0uint.encodeUintValue.encodedSize(bytes.length) + bytes.length;\n};\nencodeBytes.compareTokens = function compareTokens(tok1, tok2) {\n return compareBytes(tokenBytes(tok1), tokenBytes(tok2));\n};\nfunction compareBytes(b1, b2) {\n return b1.length < b2.length ? -1 : b1.length > b2.length ? 
1 : byteUtils.compare(b1, b2);\n}\n\nexports.compareBytes = compareBytes;\nexports.decodeBytes16 = decodeBytes16;\nexports.decodeBytes32 = decodeBytes32;\nexports.decodeBytes64 = decodeBytes64;\nexports.decodeBytes8 = decodeBytes8;\nexports.decodeBytesCompact = decodeBytesCompact;\nexports.encodeBytes = encodeBytes;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar common = require('./common.js');\nvar _0uint = require('./0uint.js');\nvar _2bytes = require('./2bytes.js');\nvar byteUtils = require('./byte-utils.js');\n\nfunction toToken(data, pos, prefix, length) {\n const totLength = prefix + length;\n common.assertEnoughData(data, pos, totLength);\n return new token.Token(token.Type.string, byteUtils.toString(data, pos + prefix, pos + totLength), totLength);\n}\nfunction decodeStringCompact(data, pos, minor, _options) {\n return toToken(data, pos, 1, minor);\n}\nfunction decodeString8(data, pos, _minor, options) {\n return toToken(data, pos, 2, _0uint.readUint8(data, pos + 1, options));\n}\nfunction decodeString16(data, pos, _minor, options) {\n return toToken(data, pos, 3, _0uint.readUint16(data, pos + 1, options));\n}\nfunction decodeString32(data, pos, _minor, options) {\n return toToken(data, pos, 5, _0uint.readUint32(data, pos + 1, options));\n}\nfunction decodeString64(data, pos, _minor, options) {\n const l = _0uint.readUint64(data, pos + 1, options);\n if (typeof l === 'bigint') {\n throw new Error(`${ common.decodeErrPrefix } 64-bit integer string lengths not supported`);\n }\n return toToken(data, pos, 9, l);\n}\nconst encodeString = _2bytes.encodeBytes;\n\nexports.decodeString16 = decodeString16;\nexports.decodeString32 = decodeString32;\nexports.decodeString64 = decodeString64;\nexports.decodeString8 = decodeString8;\nexports.decodeStringCompact = decodeStringCompact;\nexports.encodeString = encodeString;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar _0uint = require('./0uint.js');\nvar common = require('./common.js');\n\nfunction toToken(_data, _pos, prefix, length) {\n return new token.Token(token.Type.array, length, prefix);\n}\nfunction decodeArrayCompact(data, pos, minor, _options) {\n return toToken(data, pos, 1, minor);\n}\nfunction decodeArray8(data, pos, _minor, options) {\n return toToken(data, pos, 2, _0uint.readUint8(data, pos + 1, options));\n}\nfunction decodeArray16(data, pos, _minor, options) {\n return toToken(data, pos, 3, _0uint.readUint16(data, pos + 1, options));\n}\nfunction decodeArray32(data, pos, _minor, options) {\n return toToken(data, pos, 5, _0uint.readUint32(data, pos + 1, options));\n}\nfunction decodeArray64(data, pos, _minor, options) {\n const l = _0uint.readUint64(data, pos + 1, options);\n if (typeof l === 'bigint') {\n throw new Error(`${ common.decodeErrPrefix } 64-bit integer array lengths not supported`);\n }\n return toToken(data, pos, 9, l);\n}\nfunction decodeArrayIndefinite(data, pos, _minor, options) {\n if (options.allowIndefinite === false) {\n throw new Error(`${ common.decodeErrPrefix } indefinite length items not allowed`);\n }\n return toToken(data, pos, 1, Infinity);\n}\nfunction encodeArray(buf, token$1) {\n _0uint.encodeUintValue(buf, token.Type.array.majorEncoded, token$1.value);\n}\nencodeArray.compareTokens = _0uint.encodeUint.compareTokens;\n\nexports.decodeArray16 = decodeArray16;\nexports.decodeArray32 = decodeArray32;\nexports.decodeArray64 = 
decodeArray64;\nexports.decodeArray8 = decodeArray8;\nexports.decodeArrayCompact = decodeArrayCompact;\nexports.decodeArrayIndefinite = decodeArrayIndefinite;\nexports.encodeArray = encodeArray;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar _0uint = require('./0uint.js');\nvar common = require('./common.js');\n\nfunction toToken(_data, _pos, prefix, length) {\n return new token.Token(token.Type.map, length, prefix);\n}\nfunction decodeMapCompact(data, pos, minor, _options) {\n return toToken(data, pos, 1, minor);\n}\nfunction decodeMap8(data, pos, _minor, options) {\n return toToken(data, pos, 2, _0uint.readUint8(data, pos + 1, options));\n}\nfunction decodeMap16(data, pos, _minor, options) {\n return toToken(data, pos, 3, _0uint.readUint16(data, pos + 1, options));\n}\nfunction decodeMap32(data, pos, _minor, options) {\n return toToken(data, pos, 5, _0uint.readUint32(data, pos + 1, options));\n}\nfunction decodeMap64(data, pos, _minor, options) {\n const l = _0uint.readUint64(data, pos + 1, options);\n if (typeof l === 'bigint') {\n throw new Error(`${ common.decodeErrPrefix } 64-bit integer map lengths not supported`);\n }\n return toToken(data, pos, 9, l);\n}\nfunction decodeMapIndefinite(data, pos, _minor, options) {\n if (options.allowIndefinite === false) {\n throw new Error(`${ common.decodeErrPrefix } indefinite length items not allowed`);\n }\n return toToken(data, pos, 1, Infinity);\n}\nfunction encodeMap(buf, token$1) {\n _0uint.encodeUintValue(buf, token.Type.map.majorEncoded, token$1.value);\n}\nencodeMap.compareTokens = _0uint.encodeUint.compareTokens;\n\nexports.decodeMap16 = decodeMap16;\nexports.decodeMap32 = decodeMap32;\nexports.decodeMap64 = decodeMap64;\nexports.decodeMap8 = decodeMap8;\nexports.decodeMapCompact = decodeMapCompact;\nexports.decodeMapIndefinite = decodeMapIndefinite;\nexports.encodeMap = encodeMap;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar _0uint = require('./0uint.js');\n\nfunction decodeTagCompact(_data, _pos, minor, _options) {\n return new token.Token(token.Type.tag, minor, 1);\n}\nfunction decodeTag8(data, pos, _minor, options) {\n return new token.Token(token.Type.tag, _0uint.readUint8(data, pos + 1, options), 2);\n}\nfunction decodeTag16(data, pos, _minor, options) {\n return new token.Token(token.Type.tag, _0uint.readUint16(data, pos + 1, options), 3);\n}\nfunction decodeTag32(data, pos, _minor, options) {\n return new token.Token(token.Type.tag, _0uint.readUint32(data, pos + 1, options), 5);\n}\nfunction decodeTag64(data, pos, _minor, options) {\n return new token.Token(token.Type.tag, _0uint.readUint64(data, pos + 1, options), 9);\n}\nfunction encodeTag(buf, token$1) {\n _0uint.encodeUintValue(buf, token.Type.tag.majorEncoded, token$1.value);\n}\nencodeTag.compareTokens = _0uint.encodeUint.compareTokens;\n\nexports.decodeTag16 = decodeTag16;\nexports.decodeTag32 = decodeTag32;\nexports.decodeTag64 = decodeTag64;\nexports.decodeTag8 = decodeTag8;\nexports.decodeTagCompact = decodeTagCompact;\nexports.encodeTag = encodeTag;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar common = require('./common.js');\nvar _0uint = require('./0uint.js');\n\nconst MINOR_FALSE = 20;\nconst MINOR_TRUE = 21;\nconst MINOR_NULL = 22;\nconst MINOR_UNDEFINED = 23;\nfunction decodeUndefined(_data, _pos, _minor, options) {\n if 
(options.allowUndefined === false) {\n throw new Error(`${ common.decodeErrPrefix } undefined values are not supported`);\n }\n return new token.Token(token.Type.undefined, undefined, 1);\n}\nfunction decodeBreak(_data, _pos, _minor, options) {\n if (options.allowIndefinite === false) {\n throw new Error(`${ common.decodeErrPrefix } indefinite length items not allowed`);\n }\n return new token.Token(token.Type.break, undefined, 1);\n}\nfunction createToken(value, bytes, options) {\n if (options) {\n if (options.allowNaN === false && Number.isNaN(value)) {\n throw new Error(`${ common.decodeErrPrefix } NaN values are not supported`);\n }\n if (options.allowInfinity === false && (value === Infinity || value === -Infinity)) {\n throw new Error(`${ common.decodeErrPrefix } Infinity values are not supported`);\n }\n }\n return new token.Token(token.Type.float, value, bytes);\n}\nfunction decodeFloat16(data, pos, _minor, options) {\n return createToken(readFloat16(data, pos + 1), 3, options);\n}\nfunction decodeFloat32(data, pos, _minor, options) {\n return createToken(readFloat32(data, pos + 1), 5, options);\n}\nfunction decodeFloat64(data, pos, _minor, options) {\n return createToken(readFloat64(data, pos + 1), 9, options);\n}\nfunction encodeFloat(buf, token$1, options) {\n const float = token$1.value;\n if (float === false) {\n buf.push([token.Type.float.majorEncoded | MINOR_FALSE]);\n } else if (float === true) {\n buf.push([token.Type.float.majorEncoded | MINOR_TRUE]);\n } else if (float === null) {\n buf.push([token.Type.float.majorEncoded | MINOR_NULL]);\n } else if (float === undefined) {\n buf.push([token.Type.float.majorEncoded | MINOR_UNDEFINED]);\n } else {\n let decoded;\n let success = false;\n if (!options || options.float64 !== true) {\n encodeFloat16(float);\n decoded = readFloat16(ui8a, 1);\n if (float === decoded || Number.isNaN(float)) {\n ui8a[0] = 249;\n buf.push(ui8a.slice(0, 3));\n success = true;\n } else {\n encodeFloat32(float);\n decoded = readFloat32(ui8a, 1);\n if (float === decoded) {\n ui8a[0] = 250;\n buf.push(ui8a.slice(0, 5));\n success = true;\n }\n }\n }\n if (!success) {\n encodeFloat64(float);\n decoded = readFloat64(ui8a, 1);\n ui8a[0] = 251;\n buf.push(ui8a.slice(0, 9));\n }\n }\n}\nencodeFloat.encodedSize = function encodedSize(token, options) {\n const float = token.value;\n if (float === false || float === true || float === null || float === undefined) {\n return 1;\n }\n let decoded;\n if (!options || options.float64 !== true) {\n encodeFloat16(float);\n decoded = readFloat16(ui8a, 1);\n if (float === decoded || Number.isNaN(float)) {\n return 3;\n }\n encodeFloat32(float);\n decoded = readFloat32(ui8a, 1);\n if (float === decoded) {\n return 5;\n }\n }\n return 9;\n};\nconst buffer = new ArrayBuffer(9);\nconst dataView = new DataView(buffer, 1);\nconst ui8a = new Uint8Array(buffer, 0);\nfunction encodeFloat16(inp) {\n if (inp === Infinity) {\n dataView.setUint16(0, 31744, false);\n } else if (inp === -Infinity) {\n dataView.setUint16(0, 64512, false);\n } else if (Number.isNaN(inp)) {\n dataView.setUint16(0, 32256, false);\n } else {\n dataView.setFloat32(0, inp);\n const valu32 = dataView.getUint32(0);\n const exponent = (valu32 & 2139095040) >> 23;\n const mantissa = valu32 & 8388607;\n if (exponent === 255) {\n dataView.setUint16(0, 31744, false);\n } else if (exponent === 0) {\n dataView.setUint16(0, (inp & 2147483648) >> 16 | mantissa >> 13, false);\n } else {\n const logicalExponent = exponent - 127;\n if (logicalExponent < -24) {\n 
dataView.setUint16(0, 0);\n } else if (logicalExponent < -14) {\n dataView.setUint16(0, (valu32 & 2147483648) >> 16 | 1 << 24 + logicalExponent, false);\n } else {\n dataView.setUint16(0, (valu32 & 2147483648) >> 16 | logicalExponent + 15 << 10 | mantissa >> 13, false);\n }\n }\n }\n}\nfunction readFloat16(ui8a, pos) {\n if (ui8a.length - pos < 2) {\n throw new Error(`${ common.decodeErrPrefix } not enough data for float16`);\n }\n const half = (ui8a[pos] << 8) + ui8a[pos + 1];\n if (half === 31744) {\n return Infinity;\n }\n if (half === 64512) {\n return -Infinity;\n }\n if (half === 32256) {\n return NaN;\n }\n const exp = half >> 10 & 31;\n const mant = half & 1023;\n let val;\n if (exp === 0) {\n val = mant * 2 ** -24;\n } else if (exp !== 31) {\n val = (mant + 1024) * 2 ** (exp - 25);\n } else {\n val = mant === 0 ? Infinity : NaN;\n }\n return half & 32768 ? -val : val;\n}\nfunction encodeFloat32(inp) {\n dataView.setFloat32(0, inp, false);\n}\nfunction readFloat32(ui8a, pos) {\n if (ui8a.length - pos < 4) {\n throw new Error(`${ common.decodeErrPrefix } not enough data for float32`);\n }\n const offset = (ui8a.byteOffset || 0) + pos;\n return new DataView(ui8a.buffer, offset, 4).getFloat32(0, false);\n}\nfunction encodeFloat64(inp) {\n dataView.setFloat64(0, inp, false);\n}\nfunction readFloat64(ui8a, pos) {\n if (ui8a.length - pos < 8) {\n throw new Error(`${ common.decodeErrPrefix } not enough data for float64`);\n }\n const offset = (ui8a.byteOffset || 0) + pos;\n return new DataView(ui8a.buffer, offset, 8).getFloat64(0, false);\n}\nencodeFloat.compareTokens = _0uint.encodeUint.compareTokens;\n\nexports.decodeBreak = decodeBreak;\nexports.decodeFloat16 = decodeFloat16;\nexports.decodeFloat32 = decodeFloat32;\nexports.decodeFloat64 = decodeFloat64;\nexports.decodeUndefined = decodeUndefined;\nexports.encodeFloat = encodeFloat;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar byteUtils = require('./byte-utils.js');\n\nconst defaultChunkSize = 256;\nclass Bl {\n constructor(chunkSize = defaultChunkSize) {\n this.chunkSize = chunkSize;\n this.cursor = 0;\n this.maxCursor = -1;\n this.chunks = [];\n this._initReuseChunk = null;\n }\n reset() {\n this.chunks = [];\n this.cursor = 0;\n this.maxCursor = -1;\n if (this._initReuseChunk !== null) {\n this.chunks.push(this._initReuseChunk);\n this.maxCursor = this._initReuseChunk.length - 1;\n }\n }\n push(bytes) {\n let topChunk = this.chunks[this.chunks.length - 1];\n const newMax = this.cursor + bytes.length;\n if (newMax <= this.maxCursor + 1) {\n const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;\n topChunk.set(bytes, chunkPos);\n } else {\n if (topChunk) {\n const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;\n if (chunkPos < topChunk.length) {\n this.chunks[this.chunks.length - 1] = topChunk.subarray(0, chunkPos);\n this.maxCursor = this.cursor - 1;\n }\n }\n if (bytes.length < 64 && bytes.length < this.chunkSize) {\n topChunk = byteUtils.alloc(this.chunkSize);\n this.chunks.push(topChunk);\n this.maxCursor += topChunk.length;\n if (this._initReuseChunk === null) {\n this._initReuseChunk = topChunk;\n }\n topChunk.set(bytes, 0);\n } else {\n this.chunks.push(bytes);\n this.maxCursor += bytes.length;\n }\n }\n this.cursor += bytes.length;\n }\n toBytes(reset = false) {\n let byts;\n if (this.chunks.length === 1) {\n const chunk = this.chunks[0];\n if (reset && this.cursor > chunk.length / 2) {\n byts = this.cursor === chunk.length ? 
chunk : chunk.subarray(0, this.cursor);\n this._initReuseChunk = null;\n this.chunks = [];\n } else {\n byts = byteUtils.slice(chunk, 0, this.cursor);\n }\n } else {\n byts = byteUtils.concat(this.chunks, this.cursor);\n }\n if (reset) {\n this.reset();\n }\n return byts;\n }\n}\n\nexports.Bl = Bl;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst useBuffer = globalThis.process && !globalThis.process.browser && globalThis.Buffer && typeof globalThis.Buffer.isBuffer === 'function';\nconst textDecoder = new TextDecoder();\nconst textEncoder = new TextEncoder();\nfunction isBuffer(buf) {\n return useBuffer && globalThis.Buffer.isBuffer(buf);\n}\nfunction asU8A(buf) {\n if (!(buf instanceof Uint8Array)) {\n return Uint8Array.from(buf);\n }\n return isBuffer(buf) ? new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength) : buf;\n}\nconst toString = useBuffer ? (bytes, start, end) => {\n return end - start > 64 ? globalThis.Buffer.from(bytes.subarray(start, end)).toString('utf8') : utf8Slice(bytes, start, end);\n} : (bytes, start, end) => {\n return end - start > 64 ? textDecoder.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end);\n};\nconst fromString = useBuffer ? string => {\n return string.length > 64 ? globalThis.Buffer.from(string) : utf8ToBytes(string);\n} : string => {\n return string.length > 64 ? textEncoder.encode(string) : utf8ToBytes(string);\n};\nconst fromArray = arr => {\n return Uint8Array.from(arr);\n};\nconst slice = useBuffer ? (bytes, start, end) => {\n if (isBuffer(bytes)) {\n return new Uint8Array(bytes.subarray(start, end));\n }\n return bytes.slice(start, end);\n} : (bytes, start, end) => {\n return bytes.slice(start, end);\n};\nconst concat = useBuffer ? (chunks, length) => {\n chunks = chunks.map(c => c instanceof Uint8Array ? c : globalThis.Buffer.from(c));\n return asU8A(globalThis.Buffer.concat(chunks, length));\n} : (chunks, length) => {\n const out = new Uint8Array(length);\n let off = 0;\n for (let b of chunks) {\n if (off + b.length > out.length) {\n b = b.subarray(0, out.length - off);\n }\n out.set(b, off);\n off += b.length;\n }\n return out;\n};\nconst alloc = useBuffer ? size => {\n return globalThis.Buffer.allocUnsafe(size);\n} : size => {\n return new Uint8Array(size);\n};\nconst toHex = useBuffer ? d => {\n if (typeof d === 'string') {\n return d;\n }\n return globalThis.Buffer.from(toBytes(d)).toString('hex');\n} : d => {\n if (typeof d === 'string') {\n return d;\n }\n return Array.prototype.reduce.call(toBytes(d), (p, c) => `${ p }${ c.toString(16).padStart(2, '0') }`, '');\n};\nconst fromHex = useBuffer ? hex => {\n if (hex instanceof Uint8Array) {\n return hex;\n }\n return globalThis.Buffer.from(hex, 'hex');\n} : hex => {\n if (hex instanceof Uint8Array) {\n return hex;\n }\n if (!hex.length) {\n return new Uint8Array(0);\n }\n return new Uint8Array(hex.split('').map((c, i, d) => i % 2 === 0 ? 
`0x${ c }${ d[i + 1] }` : '').filter(Boolean).map(e => parseInt(e, 16)));\n};\nfunction toBytes(obj) {\n if (obj instanceof Uint8Array && obj.constructor.name === 'Uint8Array') {\n return obj;\n }\n if (obj instanceof ArrayBuffer) {\n return new Uint8Array(obj);\n }\n if (ArrayBuffer.isView(obj)) {\n return new Uint8Array(obj.buffer, obj.byteOffset, obj.byteLength);\n }\n throw new Error('Unknown type, must be binary type');\n}\nfunction compare(b1, b2) {\n if (isBuffer(b1) && isBuffer(b2)) {\n return b1.compare(b2);\n }\n for (let i = 0; i < b1.length; i++) {\n if (b1[i] === b2[i]) {\n continue;\n }\n return b1[i] < b2[i] ? -1 : 1;\n }\n return 0;\n}\nfunction utf8ToBytes(string, units = Infinity) {\n let codePoint;\n const length = string.length;\n let leadSurrogate = null;\n const bytes = [];\n for (let i = 0; i < length; ++i) {\n codePoint = string.charCodeAt(i);\n if (codePoint > 55295 && codePoint < 57344) {\n if (!leadSurrogate) {\n if (codePoint > 56319) {\n if ((units -= 3) > -1)\n bytes.push(239, 191, 189);\n continue;\n } else if (i + 1 === length) {\n if ((units -= 3) > -1)\n bytes.push(239, 191, 189);\n continue;\n }\n leadSurrogate = codePoint;\n continue;\n }\n if (codePoint < 56320) {\n if ((units -= 3) > -1)\n bytes.push(239, 191, 189);\n leadSurrogate = codePoint;\n continue;\n }\n codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;\n } else if (leadSurrogate) {\n if ((units -= 3) > -1)\n bytes.push(239, 191, 189);\n }\n leadSurrogate = null;\n if (codePoint < 128) {\n if ((units -= 1) < 0)\n break;\n bytes.push(codePoint);\n } else if (codePoint < 2048) {\n if ((units -= 2) < 0)\n break;\n bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);\n } else if (codePoint < 65536) {\n if ((units -= 3) < 0)\n break;\n bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);\n } else if (codePoint < 1114112) {\n if ((units -= 4) < 0)\n break;\n bytes.push(codePoint >> 18 | 240, codePoint >> 12 & 63 | 128, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);\n } else {\n throw new Error('Invalid code point');\n }\n }\n return bytes;\n}\nfunction utf8Slice(buf, offset, end) {\n const res = [];\n while (offset < end) {\n const firstByte = buf[offset];\n let codePoint = null;\n let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 
2 : 1;\n if (offset + bytesPerSequence <= end) {\n let secondByte, thirdByte, fourthByte, tempCodePoint;\n switch (bytesPerSequence) {\n case 1:\n if (firstByte < 128) {\n codePoint = firstByte;\n }\n break;\n case 2:\n secondByte = buf[offset + 1];\n if ((secondByte & 192) === 128) {\n tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;\n if (tempCodePoint > 127) {\n codePoint = tempCodePoint;\n }\n }\n break;\n case 3:\n secondByte = buf[offset + 1];\n thirdByte = buf[offset + 2];\n if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {\n tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;\n if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {\n codePoint = tempCodePoint;\n }\n }\n break;\n case 4:\n secondByte = buf[offset + 1];\n thirdByte = buf[offset + 2];\n fourthByte = buf[offset + 3];\n if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {\n tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;\n if (tempCodePoint > 65535 && tempCodePoint < 1114112) {\n codePoint = tempCodePoint;\n }\n }\n }\n }\n if (codePoint === null) {\n codePoint = 65533;\n bytesPerSequence = 1;\n } else if (codePoint > 65535) {\n codePoint -= 65536;\n res.push(codePoint >>> 10 & 1023 | 55296);\n codePoint = 56320 | codePoint & 1023;\n }\n res.push(codePoint);\n offset += bytesPerSequence;\n }\n return decodeCodePointsArray(res);\n}\nconst MAX_ARGUMENTS_LENGTH = 4096;\nfunction decodeCodePointsArray(codePoints) {\n const len = codePoints.length;\n if (len <= MAX_ARGUMENTS_LENGTH) {\n return String.fromCharCode.apply(String, codePoints);\n }\n let res = '';\n let i = 0;\n while (i < len) {\n res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));\n }\n return res;\n}\n\nexports.alloc = alloc;\nexports.asU8A = asU8A;\nexports.compare = compare;\nexports.concat = concat;\nexports.decodeCodePointsArray = decodeCodePointsArray;\nexports.fromArray = fromArray;\nexports.fromHex = fromHex;\nexports.fromString = fromString;\nexports.slice = slice;\nexports.toHex = toHex;\nexports.toString = toString;\nexports.useBuffer = useBuffer;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst decodeErrPrefix = 'CBOR decode error:';\nconst encodeErrPrefix = 'CBOR encode error:';\nconst uintMinorPrefixBytes = [];\nuintMinorPrefixBytes[23] = 1;\nuintMinorPrefixBytes[24] = 2;\nuintMinorPrefixBytes[25] = 3;\nuintMinorPrefixBytes[26] = 5;\nuintMinorPrefixBytes[27] = 9;\nfunction assertEnoughData(data, pos, need) {\n if (data.length - pos < need) {\n throw new Error(`${ decodeErrPrefix } not enough data for type`);\n }\n}\n\nexports.assertEnoughData = assertEnoughData;\nexports.decodeErrPrefix = decodeErrPrefix;\nexports.encodeErrPrefix = encodeErrPrefix;\nexports.uintMinorPrefixBytes = uintMinorPrefixBytes;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar common = require('./common.js');\nvar token = require('./token.js');\nvar jump = require('./jump.js');\n\nconst defaultDecodeOptions = {\n strict: false,\n allowIndefinite: true,\n allowUndefined: true,\n allowBigInt: true\n};\nclass Tokeniser {\n constructor(data, options = {}) {\n this.pos = 0;\n this.data = data;\n this.options = options;\n }\n done() {\n return this.pos >= this.data.length;\n }\n next() {\n const byt = this.data[this.pos];\n let token = jump.quick[byt];\n if (token === undefined) {\n 
const decoder = jump.jump[byt];\n if (!decoder) {\n throw new Error(`${ common.decodeErrPrefix } no decoder for major type ${ byt >>> 5 } (byte 0x${ byt.toString(16).padStart(2, '0') })`);\n }\n const minor = byt & 31;\n token = decoder(this.data, this.pos, minor, this.options);\n }\n this.pos += token.encodedLength;\n return token;\n }\n}\nconst DONE = Symbol.for('DONE');\nconst BREAK = Symbol.for('BREAK');\nfunction tokenToArray(token, tokeniser, options) {\n const arr = [];\n for (let i = 0; i < token.value; i++) {\n const value = tokensToObject(tokeniser, options);\n if (value === BREAK) {\n if (token.value === Infinity) {\n break;\n }\n throw new Error(`${ common.decodeErrPrefix } got unexpected break to lengthed array`);\n }\n if (value === DONE) {\n throw new Error(`${ common.decodeErrPrefix } found array but not enough entries (got ${ i }, expected ${ token.value })`);\n }\n arr[i] = value;\n }\n return arr;\n}\nfunction tokenToMap(token, tokeniser, options) {\n const useMaps = options.useMaps === true;\n const obj = useMaps ? undefined : {};\n const m = useMaps ? new Map() : undefined;\n for (let i = 0; i < token.value; i++) {\n const key = tokensToObject(tokeniser, options);\n if (key === BREAK) {\n if (token.value === Infinity) {\n break;\n }\n throw new Error(`${ common.decodeErrPrefix } got unexpected break to lengthed map`);\n }\n if (key === DONE) {\n throw new Error(`${ common.decodeErrPrefix } found map but not enough entries (got ${ i } [no key], expected ${ token.value })`);\n }\n if (useMaps !== true && typeof key !== 'string') {\n throw new Error(`${ common.decodeErrPrefix } non-string keys not supported (got ${ typeof key })`);\n }\n const value = tokensToObject(tokeniser, options);\n if (value === DONE) {\n throw new Error(`${ common.decodeErrPrefix } found map but not enough entries (got ${ i } [no value], expected ${ token.value })`);\n }\n if (useMaps) {\n m.set(key, value);\n } else {\n obj[key] = value;\n }\n }\n return useMaps ? 
m : obj;\n}\nfunction tokensToObject(tokeniser, options) {\n if (tokeniser.done()) {\n return DONE;\n }\n const token$1 = tokeniser.next();\n if (token$1.type === token.Type.break) {\n return BREAK;\n }\n if (token$1.type.terminal) {\n return token$1.value;\n }\n if (token$1.type === token.Type.array) {\n return tokenToArray(token$1, tokeniser, options);\n }\n if (token$1.type === token.Type.map) {\n return tokenToMap(token$1, tokeniser, options);\n }\n if (token$1.type === token.Type.tag) {\n if (options.tags && typeof options.tags[token$1.value] === 'function') {\n const tagged = tokensToObject(tokeniser, options);\n return options.tags[token$1.value](tagged);\n }\n throw new Error(`${ common.decodeErrPrefix } tag not supported (${ token$1.value })`);\n }\n throw new Error('unsupported');\n}\nfunction decode(data, options) {\n if (!(data instanceof Uint8Array)) {\n throw new Error(`${ common.decodeErrPrefix } data to decode must be a Uint8Array`);\n }\n options = Object.assign({}, defaultDecodeOptions, options);\n const tokeniser = options.tokenizer || new Tokeniser(data, options);\n const decoded = tokensToObject(tokeniser, options);\n if (decoded === DONE) {\n throw new Error(`${ common.decodeErrPrefix } did not find any content to decode`);\n }\n if (decoded === BREAK) {\n throw new Error(`${ common.decodeErrPrefix } got unexpected break`);\n }\n if (!tokeniser.done()) {\n throw new Error(`${ common.decodeErrPrefix } too many terminals, data makes no sense`);\n }\n return decoded;\n}\n\nexports.Tokeniser = Tokeniser;\nexports.decode = decode;\nexports.tokensToObject = tokensToObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar is = require('./is.js');\nvar token = require('./token.js');\nvar bl = require('./bl.js');\nvar common = require('./common.js');\nvar jump = require('./jump.js');\nvar byteUtils = require('./byte-utils.js');\nvar _0uint = require('./0uint.js');\nvar _1negint = require('./1negint.js');\nvar _2bytes = require('./2bytes.js');\nvar _3string = require('./3string.js');\nvar _4array = require('./4array.js');\nvar _5map = require('./5map.js');\nvar _6tag = require('./6tag.js');\nvar _7float = require('./7float.js');\n\nconst defaultEncodeOptions = {\n float64: false,\n mapSorter,\n quickEncodeToken: jump.quickEncodeToken\n};\nconst cborEncoders = [];\ncborEncoders[token.Type.uint.major] = _0uint.encodeUint;\ncborEncoders[token.Type.negint.major] = _1negint.encodeNegint;\ncborEncoders[token.Type.bytes.major] = _2bytes.encodeBytes;\ncborEncoders[token.Type.string.major] = _3string.encodeString;\ncborEncoders[token.Type.array.major] = _4array.encodeArray;\ncborEncoders[token.Type.map.major] = _5map.encodeMap;\ncborEncoders[token.Type.tag.major] = _6tag.encodeTag;\ncborEncoders[token.Type.float.major] = _7float.encodeFloat;\nconst buf = new bl.Bl();\nclass Ref {\n constructor(obj, parent) {\n this.obj = obj;\n this.parent = parent;\n }\n includes(obj) {\n let p = this;\n do {\n if (p.obj === obj) {\n return true;\n }\n } while (p = p.parent);\n return false;\n }\n static createCheck(stack, obj) {\n if (stack && stack.includes(obj)) {\n throw new Error(`${ common.encodeErrPrefix } object contains circular references`);\n }\n return new Ref(obj, stack);\n }\n}\nconst simpleTokens = {\n null: new token.Token(token.Type.null, null),\n undefined: new token.Token(token.Type.undefined, undefined),\n true: new token.Token(token.Type.true, true),\n false: new token.Token(token.Type.false, false),\n emptyArray: new 
token.Token(token.Type.array, 0),\n emptyMap: new token.Token(token.Type.map, 0)\n};\nconst typeEncoders = {\n number(obj, _typ, _options, _refStack) {\n if (!Number.isInteger(obj) || !Number.isSafeInteger(obj)) {\n return new token.Token(token.Type.float, obj);\n } else if (obj >= 0) {\n return new token.Token(token.Type.uint, obj);\n } else {\n return new token.Token(token.Type.negint, obj);\n }\n },\n bigint(obj, _typ, _options, _refStack) {\n if (obj >= BigInt(0)) {\n return new token.Token(token.Type.uint, obj);\n } else {\n return new token.Token(token.Type.negint, obj);\n }\n },\n Uint8Array(obj, _typ, _options, _refStack) {\n return new token.Token(token.Type.bytes, obj);\n },\n string(obj, _typ, _options, _refStack) {\n return new token.Token(token.Type.string, obj);\n },\n boolean(obj, _typ, _options, _refStack) {\n return obj ? simpleTokens.true : simpleTokens.false;\n },\n null(_obj, _typ, _options, _refStack) {\n return simpleTokens.null;\n },\n undefined(_obj, _typ, _options, _refStack) {\n return simpleTokens.undefined;\n },\n ArrayBuffer(obj, _typ, _options, _refStack) {\n return new token.Token(token.Type.bytes, new Uint8Array(obj));\n },\n DataView(obj, _typ, _options, _refStack) {\n return new token.Token(token.Type.bytes, new Uint8Array(obj.buffer, obj.byteOffset, obj.byteLength));\n },\n Array(obj, _typ, options, refStack) {\n if (!obj.length) {\n if (options.addBreakTokens === true) {\n return [\n simpleTokens.emptyArray,\n new token.Token(token.Type.break)\n ];\n }\n return simpleTokens.emptyArray;\n }\n refStack = Ref.createCheck(refStack, obj);\n const entries = [];\n let i = 0;\n for (const e of obj) {\n entries[i++] = objectToTokens(e, options, refStack);\n }\n if (options.addBreakTokens) {\n return [\n new token.Token(token.Type.array, obj.length),\n entries,\n new token.Token(token.Type.break)\n ];\n }\n return [\n new token.Token(token.Type.array, obj.length),\n entries\n ];\n },\n Object(obj, typ, options, refStack) {\n const isMap = typ !== 'Object';\n const keys = isMap ? obj.keys() : Object.keys(obj);\n const length = isMap ? obj.size : keys.length;\n if (!length) {\n if (options.addBreakTokens === true) {\n return [\n simpleTokens.emptyMap,\n new token.Token(token.Type.break)\n ];\n }\n return simpleTokens.emptyMap;\n }\n refStack = Ref.createCheck(refStack, obj);\n const entries = [];\n let i = 0;\n for (const key of keys) {\n entries[i++] = [\n objectToTokens(key, options, refStack),\n objectToTokens(isMap ? 
obj.get(key) : obj[key], options, refStack)\n ];\n }\n sortMapEntries(entries, options);\n if (options.addBreakTokens) {\n return [\n new token.Token(token.Type.map, length),\n entries,\n new token.Token(token.Type.break)\n ];\n }\n return [\n new token.Token(token.Type.map, length),\n entries\n ];\n }\n};\ntypeEncoders.Map = typeEncoders.Object;\ntypeEncoders.Buffer = typeEncoders.Uint8Array;\nfor (const typ of 'Uint8Clamped Uint16 Uint32 Int8 Int16 Int32 BigUint64 BigInt64 Float32 Float64'.split(' ')) {\n typeEncoders[`${ typ }Array`] = typeEncoders.DataView;\n}\nfunction objectToTokens(obj, options = {}, refStack) {\n const typ = is.is(obj);\n const customTypeEncoder = options && options.typeEncoders && options.typeEncoders[typ] || typeEncoders[typ];\n if (typeof customTypeEncoder === 'function') {\n const tokens = customTypeEncoder(obj, typ, options, refStack);\n if (tokens != null) {\n return tokens;\n }\n }\n const typeEncoder = typeEncoders[typ];\n if (!typeEncoder) {\n throw new Error(`${ common.encodeErrPrefix } unsupported type: ${ typ }`);\n }\n return typeEncoder(obj, typ, options, refStack);\n}\nfunction sortMapEntries(entries, options) {\n if (options.mapSorter) {\n entries.sort(options.mapSorter);\n }\n}\nfunction mapSorter(e1, e2) {\n const keyToken1 = Array.isArray(e1[0]) ? e1[0][0] : e1[0];\n const keyToken2 = Array.isArray(e2[0]) ? e2[0][0] : e2[0];\n if (keyToken1.type !== keyToken2.type) {\n return keyToken1.type.compare(keyToken2.type);\n }\n const major = keyToken1.type.major;\n const tcmp = cborEncoders[major].compareTokens(keyToken1, keyToken2);\n if (tcmp === 0) {\n console.warn('WARNING: complex key types used, CBOR key sorting guarantees are gone');\n }\n return tcmp;\n}\nfunction tokensToEncoded(buf, tokens, encoders, options) {\n if (Array.isArray(tokens)) {\n for (const token of tokens) {\n tokensToEncoded(buf, token, encoders, options);\n }\n } else {\n encoders[tokens.type.major](buf, tokens, options);\n }\n}\nfunction encodeCustom(data, encoders, options) {\n const tokens = objectToTokens(data, options);\n if (!Array.isArray(tokens) && options.quickEncodeToken) {\n const quickBytes = options.quickEncodeToken(tokens);\n if (quickBytes) {\n return quickBytes;\n }\n const encoder = encoders[tokens.type.major];\n if (encoder.encodedSize) {\n const size = encoder.encodedSize(tokens, options);\n const buf = new bl.Bl(size);\n encoder(buf, tokens, options);\n if (buf.chunks.length !== 1) {\n throw new Error(`Unexpected error: pre-calculated length for ${ tokens } was wrong`);\n }\n return byteUtils.asU8A(buf.chunks[0]);\n }\n }\n tokensToEncoded(buf, tokens, encoders, options);\n return buf.toBytes(true);\n}\nfunction encode(data, options) {\n options = Object.assign({}, defaultEncodeOptions, options);\n return encodeCustom(data, cborEncoders, options);\n}\n\nexports.Ref = Ref;\nexports.encode = encode;\nexports.encodeCustom = encodeCustom;\nexports.objectToTokens = objectToTokens;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst typeofs = [\n 'string',\n 'number',\n 'bigint',\n 'symbol'\n];\nconst objectTypeNames = [\n 'Function',\n 'Generator',\n 'AsyncGenerator',\n 'GeneratorFunction',\n 'AsyncGeneratorFunction',\n 'AsyncFunction',\n 'Observable',\n 'Array',\n 'Buffer',\n 'Object',\n 'RegExp',\n 'Date',\n 'Error',\n 'Map',\n 'Set',\n 'WeakMap',\n 'WeakSet',\n 'ArrayBuffer',\n 'SharedArrayBuffer',\n 'DataView',\n 'Promise',\n 'URL',\n 'HTMLElement',\n 'Int8Array',\n 'Uint8Array',\n 'Uint8ClampedArray',\n 
'Int16Array',\n 'Uint16Array',\n 'Int32Array',\n 'Uint32Array',\n 'Float32Array',\n 'Float64Array',\n 'BigInt64Array',\n 'BigUint64Array'\n];\nfunction is(value) {\n if (value === null) {\n return 'null';\n }\n if (value === undefined) {\n return 'undefined';\n }\n if (value === true || value === false) {\n return 'boolean';\n }\n const typeOf = typeof value;\n if (typeofs.includes(typeOf)) {\n return typeOf;\n }\n if (typeOf === 'function') {\n return 'Function';\n }\n if (Array.isArray(value)) {\n return 'Array';\n }\n if (isBuffer(value)) {\n return 'Buffer';\n }\n const objectType = getObjectType(value);\n if (objectType) {\n return objectType;\n }\n return 'Object';\n}\nfunction isBuffer(value) {\n return value && value.constructor && value.constructor.isBuffer && value.constructor.isBuffer.call(null, value);\n}\nfunction getObjectType(value) {\n const objectTypeName = Object.prototype.toString.call(value).slice(8, -1);\n if (objectTypeNames.includes(objectTypeName)) {\n return objectTypeName;\n }\n return undefined;\n}\n\nexports.is = is;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar token = require('./token.js');\nvar _0uint = require('./0uint.js');\nvar _1negint = require('./1negint.js');\nvar _2bytes = require('./2bytes.js');\nvar _3string = require('./3string.js');\nvar _4array = require('./4array.js');\nvar _5map = require('./5map.js');\nvar _6tag = require('./6tag.js');\nvar _7float = require('./7float.js');\nvar common = require('./common.js');\nvar byteUtils = require('./byte-utils.js');\n\nfunction invalidMinor(data, pos, minor) {\n throw new Error(`${ common.decodeErrPrefix } encountered invalid minor (${ minor }) for major ${ data[pos] >>> 5 }`);\n}\nfunction errorer(msg) {\n return () => {\n throw new Error(`${ common.decodeErrPrefix } ${ msg }`);\n };\n}\nconst jump = [];\nfor (let i = 0; i <= 23; i++) {\n jump[i] = invalidMinor;\n}\njump[24] = _0uint.decodeUint8;\njump[25] = _0uint.decodeUint16;\njump[26] = _0uint.decodeUint32;\njump[27] = _0uint.decodeUint64;\njump[28] = invalidMinor;\njump[29] = invalidMinor;\njump[30] = invalidMinor;\njump[31] = invalidMinor;\nfor (let i = 32; i <= 55; i++) {\n jump[i] = invalidMinor;\n}\njump[56] = _1negint.decodeNegint8;\njump[57] = _1negint.decodeNegint16;\njump[58] = _1negint.decodeNegint32;\njump[59] = _1negint.decodeNegint64;\njump[60] = invalidMinor;\njump[61] = invalidMinor;\njump[62] = invalidMinor;\njump[63] = invalidMinor;\nfor (let i = 64; i <= 87; i++) {\n jump[i] = _2bytes.decodeBytesCompact;\n}\njump[88] = _2bytes.decodeBytes8;\njump[89] = _2bytes.decodeBytes16;\njump[90] = _2bytes.decodeBytes32;\njump[91] = _2bytes.decodeBytes64;\njump[92] = invalidMinor;\njump[93] = invalidMinor;\njump[94] = invalidMinor;\njump[95] = errorer('indefinite length bytes/strings are not supported');\nfor (let i = 96; i <= 119; i++) {\n jump[i] = _3string.decodeStringCompact;\n}\njump[120] = _3string.decodeString8;\njump[121] = _3string.decodeString16;\njump[122] = _3string.decodeString32;\njump[123] = _3string.decodeString64;\njump[124] = invalidMinor;\njump[125] = invalidMinor;\njump[126] = invalidMinor;\njump[127] = errorer('indefinite length bytes/strings are not supported');\nfor (let i = 128; i <= 151; i++) {\n jump[i] = _4array.decodeArrayCompact;\n}\njump[152] = _4array.decodeArray8;\njump[153] = _4array.decodeArray16;\njump[154] = _4array.decodeArray32;\njump[155] = _4array.decodeArray64;\njump[156] = invalidMinor;\njump[157] = invalidMinor;\njump[158] = invalidMinor;\njump[159] = 
_4array.decodeArrayIndefinite;\nfor (let i = 160; i <= 183; i++) {\n jump[i] = _5map.decodeMapCompact;\n}\njump[184] = _5map.decodeMap8;\njump[185] = _5map.decodeMap16;\njump[186] = _5map.decodeMap32;\njump[187] = _5map.decodeMap64;\njump[188] = invalidMinor;\njump[189] = invalidMinor;\njump[190] = invalidMinor;\njump[191] = _5map.decodeMapIndefinite;\nfor (let i = 192; i <= 215; i++) {\n jump[i] = _6tag.decodeTagCompact;\n}\njump[216] = _6tag.decodeTag8;\njump[217] = _6tag.decodeTag16;\njump[218] = _6tag.decodeTag32;\njump[219] = _6tag.decodeTag64;\njump[220] = invalidMinor;\njump[221] = invalidMinor;\njump[222] = invalidMinor;\njump[223] = invalidMinor;\nfor (let i = 224; i <= 243; i++) {\n jump[i] = errorer('simple values are not supported');\n}\njump[244] = invalidMinor;\njump[245] = invalidMinor;\njump[246] = invalidMinor;\njump[247] = _7float.decodeUndefined;\njump[248] = errorer('simple values are not supported');\njump[249] = _7float.decodeFloat16;\njump[250] = _7float.decodeFloat32;\njump[251] = _7float.decodeFloat64;\njump[252] = invalidMinor;\njump[253] = invalidMinor;\njump[254] = invalidMinor;\njump[255] = _7float.decodeBreak;\nconst quick = [];\nfor (let i = 0; i < 24; i++) {\n quick[i] = new token.Token(token.Type.uint, i, 1);\n}\nfor (let i = -1; i >= -24; i--) {\n quick[31 - i] = new token.Token(token.Type.negint, i, 1);\n}\nquick[64] = new token.Token(token.Type.bytes, new Uint8Array(0), 1);\nquick[96] = new token.Token(token.Type.string, '', 1);\nquick[128] = new token.Token(token.Type.array, 0, 1);\nquick[160] = new token.Token(token.Type.map, 0, 1);\nquick[244] = new token.Token(token.Type.false, false, 1);\nquick[245] = new token.Token(token.Type.true, true, 1);\nquick[246] = new token.Token(token.Type.null, null, 1);\nfunction quickEncodeToken(token$1) {\n switch (token$1.type) {\n case token.Type.false:\n return byteUtils.fromArray([244]);\n case token.Type.true:\n return byteUtils.fromArray([245]);\n case token.Type.null:\n return byteUtils.fromArray([246]);\n case token.Type.bytes:\n if (!token$1.value.length) {\n return byteUtils.fromArray([64]);\n }\n return;\n case token.Type.string:\n if (token$1.value === '') {\n return byteUtils.fromArray([96]);\n }\n return;\n case token.Type.array:\n if (token$1.value === 0) {\n return byteUtils.fromArray([128]);\n }\n return;\n case token.Type.map:\n if (token$1.value === 0) {\n return byteUtils.fromArray([160]);\n }\n return;\n case token.Type.uint:\n if (token$1.value < 24) {\n return byteUtils.fromArray([Number(token$1.value)]);\n }\n return;\n case token.Type.negint:\n if (token$1.value >= -24) {\n return byteUtils.fromArray([31 - Number(token$1.value)]);\n }\n }\n}\n\nexports.jump = jump;\nexports.quick = quick;\nexports.quickEncodeToken = quickEncodeToken;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Type {\n constructor(major, name, terminal) {\n this.major = major;\n this.majorEncoded = major << 5;\n this.name = name;\n this.terminal = terminal;\n }\n toString() {\n return `Type[${ this.major }].${ this.name }`;\n }\n compare(typ) {\n return this.major < typ.major ? -1 : this.major > typ.major ? 
1 : 0;\n }\n}\nType.uint = new Type(0, 'uint', true);\nType.negint = new Type(1, 'negint', true);\nType.bytes = new Type(2, 'bytes', true);\nType.string = new Type(3, 'string', true);\nType.array = new Type(4, 'array', false);\nType.map = new Type(5, 'map', false);\nType.tag = new Type(6, 'tag', false);\nType.float = new Type(7, 'float', true);\nType.false = new Type(7, 'false', true);\nType.true = new Type(7, 'true', true);\nType.null = new Type(7, 'null', true);\nType.undefined = new Type(7, 'undefined', true);\nType.break = new Type(7, 'break', true);\nclass Token {\n constructor(type, value, encodedLength) {\n this.type = type;\n this.value = value;\n this.encodedLength = encodedLength;\n this.encodedBytes = undefined;\n }\n toString() {\n return `Token[${ this.type }].${ this.value }`;\n }\n}\n\nexports.Token = Token;\nexports.Type = Type;\n","module.exports = function (xs, fn) {\n var res = [];\n for (var i = 0; i < xs.length; i++) {\n var x = fn(xs[i], i);\n if (isArray(x)) res.push.apply(res, x);\n else res.push(x);\n }\n return res;\n};\n\nvar isArray = Array.isArray || function (xs) {\n return Object.prototype.toString.call(xs) === '[object Array]';\n};\n","\"use strict\";\n/**\n * Returns a `Buffer` instance from the given data URI `uri`.\n *\n * @param {String} uri Data URI to turn into a Buffer instance\n * @return {Buffer} Buffer instance from Data URI\n * @api public\n */\nfunction dataUriToBuffer(uri) {\n if (!/^data:/i.test(uri)) {\n throw new TypeError('`uri` does not appear to be a Data URI (must begin with \"data:\")');\n }\n // strip newlines\n uri = uri.replace(/\\r?\\n/g, '');\n // split the URI up into the \"metadata\" and the \"data\" portions\n const firstComma = uri.indexOf(',');\n if (firstComma === -1 || firstComma <= 4) {\n throw new TypeError('malformed data: URI');\n }\n // remove the \"data:\" scheme and parse the metadata\n const meta = uri.substring(5, firstComma).split(';');\n let charset = '';\n let base64 = false;\n const type = meta[0] || 'text/plain';\n let typeFull = type;\n for (let i = 1; i < meta.length; i++) {\n if (meta[i] === 'base64') {\n base64 = true;\n }\n else {\n typeFull += `;${meta[i]}`;\n if (meta[i].indexOf('charset=') === 0) {\n charset = meta[i].substring(8);\n }\n }\n }\n // defaults to US-ASCII only if type is not provided\n if (!meta[0] && !charset.length) {\n typeFull += ';charset=US-ASCII';\n charset = 'US-ASCII';\n }\n // get the encoded data portion and decode URI-encoded chars\n const encoding = base64 ? 
'base64' : 'ascii';\n const data = unescape(uri.substring(firstComma + 1));\n const buffer = Buffer.from(data, encoding);\n // set `.type` and `.typeFull` properties to MIME type\n buffer.type = type;\n buffer.typeFull = typeFull;\n // set the `.charset` property\n buffer.charset = charset;\n return buffer;\n}\nmodule.exports = dataUriToBuffer;\n//# sourceMappingURL=index.js.map","'use strict';\n\n/**\n * @typedef {{ [key: string]: any }} Extensions\n * @typedef {Error} Err\n * @property {string} message\n */\n\n/**\n *\n * @param {Error} obj\n * @param {Extensions} props\n * @returns {Error & Extensions}\n */\nfunction assign(obj, props) {\n for (const key in props) {\n Object.defineProperty(obj, key, {\n value: props[key],\n enumerable: true,\n configurable: true,\n });\n }\n\n return obj;\n}\n\n/**\n *\n * @param {any} err - An Error\n * @param {string|Extensions} code - A string code or props to set on the error\n * @param {Extensions} [props] - Props to set on the error\n * @returns {Error & Extensions}\n */\nfunction createError(err, code, props) {\n if (!err || typeof err === 'string') {\n throw new TypeError('Please pass an Error to err-code');\n }\n\n if (!props) {\n props = {};\n }\n\n if (typeof code === 'object') {\n props = code;\n code = '';\n }\n\n if (code) {\n props.code = code;\n }\n\n try {\n return assign(err, props);\n } catch (_) {\n props.message = err.message;\n props.stack = err.stack;\n\n const ErrClass = function () {};\n\n ErrClass.prototype = Object.create(Object.getPrototypeOf(err));\n\n // @ts-ignore\n const output = assign(new ErrClass(), props);\n\n return output;\n }\n}\n\nmodule.exports = createError;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar Path = require('path');\nvar fs = require('fs');\nvar glob = require('it-glob');\nvar errCode = require('err-code');\n\nfunction _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }\n\nvar Path__default = /*#__PURE__*/_interopDefaultLegacy(Path);\nvar fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);\nvar glob__default = /*#__PURE__*/_interopDefaultLegacy(glob);\nvar errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);\n\nasync function getFilesFromPath(paths, options) {\n const files = [];\n for await (const file of filesFromPath(paths, options)) {\n files.push(file);\n }\n return files;\n}\nasync function* filesFromPath(paths, options) {\n options = options || {};\n if (typeof paths === 'string') {\n paths = [paths];\n }\n const globSourceOptions = {\n recursive: true,\n glob: {\n dot: Boolean(options.hidden),\n ignore: Array.isArray(options.ignore) ? options.ignore : [],\n follow: options.followSymlinks != null ? options.followSymlinks : true\n }\n };\n for await (const path of paths) {\n if (typeof path !== 'string') {\n throw errCode__default['default'](new Error('Path must be a string'), 'ERR_INVALID_PATH', { path });\n }\n const absolutePath = Path__default['default'].resolve(process.cwd(), path);\n const stat = await fs.promises.stat(absolutePath);\n const prefix = Path__default['default'].dirname(absolutePath);\n let mode = options.mode;\n if (options.preserveMode) {\n mode = stat.mode;\n }\n let mtime = options.mtime;\n if (options.preserveMtime) {\n mtime = stat.mtime;\n }\n yield* toGlobSource({\n path,\n type: stat.isDirectory() ? 
'dir' : 'file',\n prefix,\n mode,\n mtime,\n size: stat.size,\n preserveMode: options.preserveMode,\n preserveMtime: options.preserveMtime\n }, globSourceOptions);\n }\n}\nasync function* toGlobSource({path, type, prefix, mode, mtime, size, preserveMode, preserveMtime}, options) {\n options = options || {};\n const baseName = Path__default['default'].basename(path);\n if (type === 'file') {\n yield {\n name: `/${ baseName.replace(prefix, '') }`,\n stream: () => fs__default['default'].createReadStream(Path__default['default'].isAbsolute(path) ? path : Path__default['default'].join(process.cwd(), path)),\n mode,\n mtime,\n size\n };\n return;\n }\n const globOptions = Object.assign({}, options.glob, {\n cwd: path,\n nodir: false,\n realpath: false,\n absolute: true\n });\n for await (const p of glob__default['default'](path, '**/*', globOptions)) {\n const stat = await fs.promises.stat(p);\n if (!stat.isFile()) {\n continue;\n }\n if (preserveMode || preserveMtime) {\n if (preserveMode) {\n mode = stat.mode;\n }\n if (preserveMtime) {\n mtime = stat.mtime;\n }\n }\n yield {\n name: toPosix(p.replace(prefix, '')),\n stream: () => fs__default['default'].createReadStream(p),\n mode,\n mtime,\n size: stat.size\n };\n }\n}\nconst toPosix = path => path.replace(/\\\\/g, '/');\n\nexports.filesFromPath = filesFromPath;\nexports.getFilesFromPath = getFilesFromPath;\n","'use strict'\n\n// @ts-ignore\nconst SparseArray = require('sparse-array')\nconst uint8ArrayFromString = require('uint8arrays/from-string')\n\n/**\n * @typedef {import('./consumable-hash').InfiniteHash} InfiniteHash\n * @typedef {import('../').UserBucketOptions} UserBucketOptions\n */\n\n/**\n * @template V\n * @typedef {object} BucketChild\n * @property {string} key\n * @property {V} value\n * @property {InfiniteHash} hash\n */\n\n/**\n * @template B\n *\n * @typedef {object} SA\n * @property {number} length\n * @property {() => B[]} compactArray\n * @property {(i: number) => B} get\n * @property {(i: number, value: B) => void} set\n * @property { (fn: (acc: A, curr: B, index: number) => A, initial: A) => B} reduce\n * @property {(fn: (item: B) => boolean) => B | undefined} find\n * @property {() => number[]} bitField\n * @property {(i: number) => void} unset\n */\n\n/**\n * @template T\n *\n * @typedef {object} BucketPosition\n * @property {Bucket} bucket\n * @property {number} pos\n * @property {InfiniteHash} hash\n * @property {BucketChild} [existingChild]\n */\n\n/**\n * @typedef {object} BucketOptions\n * @property {number} bits\n * @property {(value: Uint8Array | InfiniteHash) => InfiniteHash} hash\n */\n\n/**\n * @template T\n */\nclass Bucket {\n /**\n * @param {BucketOptions} options\n * @param {Bucket} [parent]\n * @param {number} [posAtParent=0]\n */\n constructor (options, parent, posAtParent = 0) {\n this._options = options\n this._popCount = 0\n this._parent = parent\n this._posAtParent = posAtParent\n\n /** @type {SA | BucketChild>} */\n this._children = new SparseArray()\n\n /** @type {string | null} */\n this.key = null\n }\n\n /**\n * @param {string} key\n * @param {T} value\n */\n async put (key, value) {\n const place = await this._findNewBucketAndPos(key)\n\n await place.bucket._putAt(place, key, value)\n }\n\n /**\n * @param {string} key\n */\n async get (key) {\n const child = await this._findChild(key)\n\n if (child) {\n return child.value\n }\n }\n\n /**\n * @param {string} key\n */\n async del (key) {\n const place = await this._findPlace(key)\n const child = place.bucket._at(place.pos)\n\n if (child && 
child.key === key) {\n place.bucket._delAt(place.pos)\n }\n }\n\n /**\n * @returns {number}\n */\n leafCount () {\n const children = this._children.compactArray()\n\n return children.reduce((acc, child) => {\n if (child instanceof Bucket) {\n return acc + child.leafCount()\n }\n\n return acc + 1\n }, 0)\n }\n\n childrenCount () {\n return this._children.length\n }\n\n onlyChild () {\n return this._children.get(0)\n }\n\n /**\n * @returns {Iterable>}\n */\n * eachLeafSeries () {\n const children = this._children.compactArray()\n\n for (const child of children) {\n if (child instanceof Bucket) {\n yield * child.eachLeafSeries()\n } else {\n yield child\n }\n }\n\n // this is necessary because tsc requires a @return annotation as it\n // can't derive a return type due to the recursion, and eslint requires\n // a return statement when there is a @return annotation\n return []\n }\n\n /**\n * @param {(value: BucketChild, index: number) => T} map\n * @param {(reduced: any) => any} reduce\n */\n serialize (map, reduce) {\n /** @type {T[]} */\n const acc = []\n // serialize to a custom non-sparse representation\n return reduce(this._children.reduce((acc, child, index) => {\n if (child) {\n if (child instanceof Bucket) {\n acc.push(child.serialize(map, reduce))\n } else {\n acc.push(map(child, index))\n }\n }\n return acc\n }, acc))\n }\n\n /**\n * @param {(value: BucketChild) => Promise} asyncMap\n * @param {(reduced: any) => Promise} asyncReduce\n */\n asyncTransform (asyncMap, asyncReduce) {\n return asyncTransformBucket(this, asyncMap, asyncReduce)\n }\n\n toJSON () {\n return this.serialize(mapNode, reduceNodes)\n }\n\n prettyPrint () {\n return JSON.stringify(this.toJSON(), null, ' ')\n }\n\n tableSize () {\n return Math.pow(2, this._options.bits)\n }\n\n /**\n * @param {string} key\n * @returns {Promise | undefined>}\n */\n async _findChild (key) {\n const result = await this._findPlace(key)\n const child = result.bucket._at(result.pos)\n\n if (child instanceof Bucket) {\n // should not be possible, this._findPlace should always\n // return a location for a child, not a bucket\n return undefined\n }\n\n if (child && child.key === key) {\n return child\n }\n }\n\n /**\n * @param {string | InfiniteHash} key\n * @returns {Promise>}\n */\n async _findPlace (key) {\n const hashValue = this._options.hash(typeof key === 'string' ? 
uint8ArrayFromString(key) : key)\n const index = await hashValue.take(this._options.bits)\n\n const child = this._children.get(index)\n\n if (child instanceof Bucket) {\n return child._findPlace(hashValue)\n }\n\n return {\n bucket: this,\n pos: index,\n hash: hashValue,\n existingChild: child\n }\n }\n\n /**\n * @param {string | InfiniteHash} key\n * @returns {Promise>}\n */\n async _findNewBucketAndPos (key) {\n const place = await this._findPlace(key)\n\n if (place.existingChild && place.existingChild.key !== key) {\n // conflict\n const bucket = new Bucket(this._options, place.bucket, place.pos)\n place.bucket._putObjectAt(place.pos, bucket)\n\n // put the previous value\n const newPlace = await bucket._findPlace(place.existingChild.hash)\n newPlace.bucket._putAt(newPlace, place.existingChild.key, place.existingChild.value)\n\n return bucket._findNewBucketAndPos(place.hash)\n }\n\n // no conflict, we found the place\n return place\n }\n\n /**\n * @param {BucketPosition} place\n * @param {string} key\n * @param {T} value\n */\n _putAt (place, key, value) {\n this._putObjectAt(place.pos, {\n key: key,\n value: value,\n hash: place.hash\n })\n }\n\n /**\n * @param {number} pos\n * @param {Bucket | BucketChild} object\n */\n _putObjectAt (pos, object) {\n if (!this._children.get(pos)) {\n this._popCount++\n }\n this._children.set(pos, object)\n }\n\n /**\n * @param {number} pos\n */\n _delAt (pos) {\n if (pos === -1) {\n throw new Error('Invalid position')\n }\n\n if (this._children.get(pos)) {\n this._popCount--\n }\n this._children.unset(pos)\n this._level()\n }\n\n _level () {\n if (this._parent && this._popCount <= 1) {\n if (this._popCount === 1) {\n // remove myself from parent, replacing me with my only child\n const onlyChild = this._children.find(exists)\n\n if (onlyChild && !(onlyChild instanceof Bucket)) {\n const hash = onlyChild.hash\n hash.untake(this._options.bits)\n const place = {\n pos: this._posAtParent,\n hash: hash,\n bucket: this._parent\n }\n this._parent._putAt(place, onlyChild.key, onlyChild.value)\n }\n } else {\n this._parent._delAt(this._posAtParent)\n }\n }\n }\n\n /**\n * @param {number} index\n * @returns {BucketChild | Bucket | undefined}\n */\n _at (index) {\n return this._children.get(index)\n }\n}\n\n/**\n * @param {any} o\n */\nfunction exists (o) {\n return Boolean(o)\n}\n\n/**\n *\n * @param {*} node\n * @param {number} index\n */\nfunction mapNode (node, index) {\n return node.key\n}\n\n/**\n * @param {*} nodes\n */\nfunction reduceNodes (nodes) {\n return nodes\n}\n\n/**\n * @template T\n *\n * @param {Bucket} bucket\n * @param {(value: BucketChild) => Promise} asyncMap\n * @param {(reduced: any) => Promise} asyncReduce\n */\nasync function asyncTransformBucket (bucket, asyncMap, asyncReduce) {\n const output = []\n\n for (const child of bucket._children.compactArray()) {\n if (child instanceof Bucket) {\n await asyncTransformBucket(child, asyncMap, asyncReduce)\n } else {\n const mappedChildren = await asyncMap(child)\n\n output.push({\n bitField: bucket._children.bitField(),\n children: mappedChildren\n })\n }\n }\n\n return asyncReduce(output)\n}\n\nmodule.exports = Bucket\n","'use strict'\n\nconst START_MASKS = [\n 0b11111111,\n 0b11111110,\n 0b11111100,\n 0b11111000,\n 0b11110000,\n 0b11100000,\n 0b11000000,\n 0b10000000\n]\n\nconst STOP_MASKS = [\n 0b00000001,\n 0b00000011,\n 0b00000111,\n 0b00001111,\n 0b00011111,\n 0b00111111,\n 0b01111111,\n 0b11111111\n]\n\nmodule.exports = class ConsumableBuffer {\n /**\n * @param {Uint8Array} value\n */\n 
constructor (value) {\n this._value = value\n this._currentBytePos = value.length - 1\n this._currentBitPos = 7\n }\n\n availableBits () {\n return this._currentBitPos + 1 + this._currentBytePos * 8\n }\n\n totalBits () {\n return this._value.length * 8\n }\n\n /**\n * @param {number} bits\n */\n take (bits) {\n let pendingBits = bits\n let result = 0\n while (pendingBits && this._haveBits()) {\n const byte = this._value[this._currentBytePos]\n const availableBits = this._currentBitPos + 1\n const taking = Math.min(availableBits, pendingBits)\n const value = byteBitsToInt(byte, availableBits - taking, taking)\n result = (result << taking) + value\n\n pendingBits -= taking\n\n this._currentBitPos -= taking\n if (this._currentBitPos < 0) {\n this._currentBitPos = 7\n this._currentBytePos--\n }\n }\n\n return result\n }\n\n /**\n * @param {number} bits\n */\n untake (bits) {\n this._currentBitPos += bits\n while (this._currentBitPos > 7) {\n this._currentBitPos -= 8\n this._currentBytePos += 1\n }\n }\n\n _haveBits () {\n return this._currentBytePos >= 0\n }\n}\n\n/**\n * @param {number} byte\n * @param {number} start\n * @param {number} length\n */\nfunction byteBitsToInt (byte, start, length) {\n const mask = maskFor(start, length)\n return (byte & mask) >>> start\n}\n\n/**\n * @param {number} start\n * @param {number} length\n */\nfunction maskFor (start, length) {\n return START_MASKS[start] & STOP_MASKS[Math.min(length + start - 1, 7)]\n}\n","'use strict'\n\nconst ConsumableBuffer = require('./consumable-buffer')\nconst uint8ArrayConcat = require('uint8arrays/concat')\n\n/**\n * @param {(value: Uint8Array) => Promise} hashFn\n */\nfunction wrapHash (hashFn) {\n /**\n * @param {InfiniteHash | Uint8Array} value\n */\n function hashing (value) {\n if (value instanceof InfiniteHash) {\n // already a hash. return it\n return value\n } else {\n return new InfiniteHash(value, hashFn)\n }\n }\n\n return hashing\n}\n\nclass InfiniteHash {\n /**\n *\n * @param {Uint8Array} value\n * @param {(value: Uint8Array) => Promise} hashFn\n */\n constructor (value, hashFn) {\n if (!(value instanceof Uint8Array)) {\n throw new Error('can only hash Uint8Arrays')\n }\n\n this._value = value\n this._hashFn = hashFn\n this._depth = -1\n this._availableBits = 0\n this._currentBufferIndex = 0\n\n /** @type {ConsumableBuffer[]} */\n this._buffers = []\n }\n\n /**\n * @param {number} bits\n */\n async take (bits) {\n let pendingBits = bits\n\n while (this._availableBits < pendingBits) {\n await this._produceMoreBits()\n }\n\n let result = 0\n\n while (pendingBits > 0) {\n const hash = this._buffers[this._currentBufferIndex]\n const available = Math.min(hash.availableBits(), pendingBits)\n const took = hash.take(available)\n result = (result << available) + took\n pendingBits -= available\n this._availableBits -= available\n\n if (hash.availableBits() === 0) {\n this._currentBufferIndex++\n }\n }\n\n return result\n }\n\n /**\n * @param {number} bits\n */\n untake (bits) {\n let pendingBits = bits\n\n while (pendingBits > 0) {\n const hash = this._buffers[this._currentBufferIndex]\n const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits)\n hash.untake(availableForUntake)\n pendingBits -= availableForUntake\n this._availableBits += availableForUntake\n\n if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {\n this._depth--\n this._currentBufferIndex--\n }\n }\n }\n\n async _produceMoreBits () {\n this._depth++\n\n const value = this._depth ? 
uint8ArrayConcat([this._value, Uint8Array.from([this._depth])]) : this._value\n const hashValue = await this._hashFn(value)\n const buffer = new ConsumableBuffer(hashValue)\n\n this._buffers.push(buffer)\n this._availableBits += buffer.availableBits()\n }\n}\n\nmodule.exports = wrapHash\nmodule.exports.InfiniteHash = InfiniteHash\n","'use strict'\n\nconst Bucket = require('./bucket')\nconst wrapHash = require('./consumable-hash')\n\n/**\n * @typedef {object} UserBucketOptions\n * @property {(value: Uint8Array) => Promise} hashFn\n * @property {number} [bits=8]\n */\n\n/**\n * @param {UserBucketOptions} options\n */\nfunction createHAMT (options) {\n if (!options || !options.hashFn) {\n throw new Error('please define an options.hashFn')\n }\n\n const bucketOptions = {\n bits: options.bits || 8,\n hash: wrapHash(options.hashFn)\n }\n\n return new Bucket(bucketOptions)\n}\n\nmodule.exports = {\n createHAMT,\n Bucket\n}\n","'use strict'\n\nconst drain = require('it-drain')\nconst filter = require('it-filter')\nconst take = require('it-take')\nconst all = require('it-all')\n\n/**\n * Collect all values from the iterable and sort them using\n * the passed sorter function\n *\n * @template T\n * @param {AsyncIterable | Iterable} iterable\n * @param {(a: T, b: T) => -1 | 0 | 1} sorter\n * @returns {AsyncIterable}\n */\nconst sortAll = (iterable, sorter) => {\n return (async function * () {\n const values = await all(iterable)\n yield * values.sort(sorter)\n })()\n}\n\n/**\n * @typedef {import('./types').Options} Options\n * @typedef {import('./types').Pair} Pair\n * @typedef {import('./types').Blockstore} Blockstore\n * @typedef {import('./types').Query} Query\n * @typedef {import('./types').KeyQuery} KeyQuery\n * @typedef {import('./types').Batch} Batch\n *\n * @typedef {import('multiformats').CID} CID\n */\n\n/**\n * @template O\n * @typedef {import('interface-store').AwaitIterable} AwaitIterable\n */\n\n/**\n * @implements {Blockstore}\n */\nclass BlockstoreAdapter {\n /**\n * @returns {Promise}\n */\n open () {\n return Promise.reject(new Error('.open is not implemented'))\n }\n\n /**\n * @returns {Promise}\n */\n close () {\n return Promise.reject(new Error('.close is not implemented'))\n }\n\n /**\n * @param {CID} key\n * @param {Uint8Array} val\n * @param {Options} [options]\n * @returns {Promise}\n */\n put (key, val, options) {\n return Promise.reject(new Error('.put is not implemented'))\n }\n\n /**\n * @param {CID} key\n * @param {Options} [options]\n * @returns {Promise}\n */\n get (key, options) {\n return Promise.reject(new Error('.get is not implemented'))\n }\n\n /**\n * @param {CID} key\n * @param {Options} [options]\n * @returns {Promise}\n */\n has (key, options) {\n return Promise.reject(new Error('.has is not implemented'))\n }\n\n /**\n * @param {CID} key\n * @param {Options} [options]\n * @returns {Promise}\n */\n delete (key, options) {\n return Promise.reject(new Error('.delete is not implemented'))\n }\n\n /**\n * @param {AwaitIterable} source\n * @param {Options} [options]\n * @returns {AsyncIterable}\n */\n async * putMany (source, options = {}) {\n for await (const { key, value } of source) {\n await this.put(key, value, options)\n yield { key, value }\n }\n }\n\n /**\n * @param {AwaitIterable} source\n * @param {Options} [options]\n * @returns {AsyncIterable}\n */\n async * getMany (source, options = {}) {\n for await (const key of source) {\n yield this.get(key, options)\n }\n }\n\n /**\n * @param {AwaitIterable} source\n * @param {Options} [options]\n * @returns 
{AsyncIterable}\n */\n async * deleteMany (source, options = {}) {\n for await (const key of source) {\n await this.delete(key, options)\n yield key\n }\n }\n\n /**\n * @returns {Batch}\n */\n batch () {\n /** @type {Pair[]} */\n let puts = []\n /** @type {CID[]} */\n let dels = []\n\n return {\n put (key, value) {\n puts.push({ key, value })\n },\n\n delete (key) {\n dels.push(key)\n },\n commit: async (options) => {\n await drain(this.putMany(puts, options))\n puts = []\n await drain(this.deleteMany(dels, options))\n dels = []\n }\n }\n }\n\n /**\n * Extending classes should override `query` or implement this method\n *\n * @param {Query} q\n * @param {Options} [options]\n * @returns {AsyncIterable}\n */\n // eslint-disable-next-line require-yield\n async * _all (q, options) {\n throw new Error('._all is not implemented')\n }\n\n /**\n * Extending classes should override `queryKeys` or implement this method\n *\n * @param {KeyQuery} q\n * @param {Options} [options]\n * @returns {AsyncIterable}\n */\n // eslint-disable-next-line require-yield\n async * _allKeys (q, options) {\n throw new Error('._allKeys is not implemented')\n }\n\n /**\n * @param {Query} q\n * @param {Options} [options]\n */\n query (q, options) {\n let it = this._all(q, options)\n\n if (q.prefix != null) {\n it = filter(it, (/** @type {Pair} */ e) =>\n e.key.toString().startsWith(q.prefix || '')\n )\n }\n\n if (Array.isArray(q.filters)) {\n it = q.filters.reduce((it, f) => filter(it, f), it)\n }\n\n if (Array.isArray(q.orders)) {\n it = q.orders.reduce((it, f) => sortAll(it, f), it)\n }\n\n if (q.offset != null) {\n let i = 0\n it = filter(it, () => i++ >= (q.offset || 0))\n }\n\n if (q.limit != null) {\n it = take(it, q.limit)\n }\n\n return it\n }\n\n /**\n * @param {KeyQuery} q\n * @param {Options} [options]\n */\n queryKeys (q, options) {\n let it = this._allKeys(q, options)\n\n if (q.prefix != null) {\n it = filter(it, (/** @type {CID} */ cid) => cid.toString().startsWith(q.prefix || ''))\n }\n\n if (Array.isArray(q.filters)) {\n it = q.filters.reduce((it, f) => filter(it, f), it)\n }\n\n if (Array.isArray(q.orders)) {\n it = q.orders.reduce((it, f) => sortAll(it, f), it)\n }\n\n if (q.offset != null) {\n let i = 0\n it = filter(it, () => i++ >= /** @type {number} */ (q.offset))\n }\n\n if (q.limit != null) {\n it = take(it, q.limit)\n }\n\n return it\n }\n}\n\nmodule.exports = BlockstoreAdapter\n","'use strict'\n\nconst errCode = require('err-code')\n\n/**\n * @param {Error} [err]\n */\nfunction notFoundError (err) {\n err = err || new Error('Not Found')\n return errCode(err, 'ERR_NOT_FOUND')\n}\n\nmodule.exports = {\n notFoundError\n}\n","'use strict'\n\nconst BlockstoreAdapter = require('./adapter')\nconst MemoryBlockstore = require('./memory')\n\n/**\n * @typedef {import('./types').Options} Options\n * @typedef {import('./types').Pair} Pair\n * @typedef {import('./types').Batch} Batch\n * @typedef {import('./types').Blockstore} Blockstore\n * @typedef {import('./types').QueryFilter} QueryFilter\n * @typedef {import('./types').QueryOrder} QueryOrder\n * @typedef {import('./types').Query} Query\n * @typedef {import('./types').KeyQueryFilter} KeyQueryFilter\n * @typedef {import('./types').KeyQueryOrder} KeyQueryOrder\n * @typedef {import('./types').KeyQuery} KeyQuery\n */\n\nmodule.exports = {\n BlockstoreAdapter,\n MemoryBlockstore\n}\n","'use strict'\n\nconst Adapter = require('./adapter')\nconst { base32 } = require('multiformats/bases/base32')\nconst raw = require('multiformats/codecs/raw')\nconst { CID } = 
require('multiformats/cid')\nconst Digest = require('multiformats/hashes/digest')\nconst Errors = require('./errors')\n\n/**\n * @typedef {import('./types').Pair} Pair\n * @typedef {import('./types').Blockstore} Blockstore\n * @typedef {import('interface-store').Options} Options\n */\n\n/**\n * @class MemoryBlockstore\n * @implements {Blockstore}\n */\nclass MemoryBlockstore extends Adapter {\n constructor () {\n super()\n\n /** @type {Record} */\n this.data = {}\n }\n\n open () {\n return Promise.resolve()\n }\n\n close () {\n return Promise.resolve()\n }\n\n /**\n * @param {CID} key\n * @param {Uint8Array} val\n */\n async put (key, val) { // eslint-disable-line require-await\n this.data[base32.encode(key.multihash.bytes)] = val\n }\n\n /**\n * @param {CID} key\n */\n async get (key) {\n const exists = await this.has(key)\n if (!exists) throw Errors.notFoundError()\n return this.data[base32.encode(key.multihash.bytes)]\n }\n\n /**\n * @param {CID} key\n */\n async has (key) { // eslint-disable-line require-await\n return this.data[base32.encode(key.multihash.bytes)] !== undefined\n }\n\n /**\n * @param {CID} key\n */\n async delete (key) { // eslint-disable-line require-await\n delete this.data[base32.encode(key.multihash.bytes)]\n }\n\n async * _all () {\n yield * Object.entries(this.data)\n .map(([key, value]) => ({ key: CID.createV1(raw.code, Digest.decode(base32.decode(key))), value }))\n }\n\n async * _allKeys () {\n yield * Object.entries(this.data)\n .map(([key]) => CID.createV1(raw.code, Digest.decode(base32.decode(key))))\n }\n}\n\nmodule.exports = MemoryBlockstore\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FsBlockStore = void 0;\nconst fs_1 = __importDefault(require(\"fs\"));\nconst os_1 = __importDefault(require(\"os\"));\nconst multiformats_1 = require(\"multiformats\");\nconst interface_blockstore_1 = require(\"interface-blockstore\");\nclass FsBlockStore extends interface_blockstore_1.BlockstoreAdapter {\n constructor() {\n super();\n this.path = `${os_1.default.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`;\n this._opened = false;\n }\n async _open() {\n if (this._opening) {\n await this._opening;\n }\n else {\n this._opening = fs_1.default.promises.mkdir(this.path);\n await this._opening;\n this._opened = true;\n }\n }\n async put(cid, bytes) {\n if (!this._opened) {\n await this._open();\n }\n const cidStr = cid.toString();\n const location = `${this.path}/${cidStr}`;\n await fs_1.default.promises.writeFile(location, bytes);\n }\n async get(cid) {\n if (!this._opened) {\n await this._open();\n }\n const cidStr = cid.toString();\n const location = `${this.path}/${cidStr}`;\n const bytes = await fs_1.default.promises.readFile(location);\n return bytes;\n }\n async *blocks() {\n if (!this._opened) {\n await this._open();\n }\n const cids = await fs_1.default.promises.readdir(this.path);\n for (const cidStr of cids) {\n const location = `${this.path}/${cidStr}`;\n const bytes = await fs_1.default.promises.readFile(location);\n yield { cid: multiformats_1.CID.parse(cidStr), bytes };\n }\n }\n async close() {\n if (this._opened) {\n await fs_1.default.promises.rm(this.path, { recursive: true });\n }\n this._opened = false;\n }\n}\nexports.FsBlockStore = FsBlockStore;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true 
});\nexports.MemoryBlockStore = void 0;\nconst multiformats_1 = require(\"multiformats\");\nconst interface_blockstore_1 = require(\"interface-blockstore\");\nclass MemoryBlockStore extends interface_blockstore_1.BlockstoreAdapter {\n constructor() {\n super();\n this.store = new Map();\n }\n async *blocks() {\n for (const [cidStr, bytes] of this.store.entries()) {\n yield { cid: multiformats_1.CID.parse(cidStr), bytes };\n }\n }\n put(cid, bytes) {\n this.store.set(cid.toString(), bytes);\n return Promise.resolve();\n }\n get(cid) {\n const bytes = this.store.get(cid.toString());\n if (!bytes) {\n throw new Error(`block with cid ${cid.toString()} no found`);\n }\n return Promise.resolve(bytes);\n }\n close() {\n this.store.clear();\n return Promise.resolve();\n }\n}\nexports.MemoryBlockStore = MemoryBlockStore;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.unixfsImporterOptionsDefault = void 0;\nconst sha2_1 = require(\"multiformats/hashes/sha2\");\nexports.unixfsImporterOptionsDefault = {\n cidVersion: 1,\n chunker: 'fixed',\n maxChunkSize: 262144,\n hasher: sha2_1.sha256,\n rawLeaves: true,\n wrapWithDirectory: true\n};\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.pack = void 0;\nconst it_last_1 = __importDefault(require(\"it-last\"));\nconst it_pipe_1 = __importDefault(require(\"it-pipe\"));\nconst car_1 = require(\"@ipld/car\");\nconst ipfs_unixfs_importer_1 = require(\"ipfs-unixfs-importer\");\nconst index_js_1 = __importDefault(require(\"ipfs-core-utils/src/files/normalise-input/index.js\"));\nconst memory_1 = require(\"../blockstore/memory\");\nconst constants_1 = require(\"./constants\");\nasync function pack({ input, blockstore: userBlockstore, maxChunkSize, wrapWithDirectory }) {\n if (!input || (Array.isArray(input) && !input.length)) {\n throw new Error('missing input file(s)');\n }\n const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore();\n // Consume the source\n const rootEntry = await it_last_1.default(it_pipe_1.default(index_js_1.default(input), (source) => ipfs_unixfs_importer_1.importer(source, blockstore, {\n ...constants_1.unixfsImporterOptionsDefault,\n maxChunkSize: maxChunkSize || constants_1.unixfsImporterOptionsDefault.maxChunkSize,\n wrapWithDirectory: wrapWithDirectory === false ? false : constants_1.unixfsImporterOptionsDefault.wrapWithDirectory\n })));\n if (!rootEntry || !rootEntry.cid) {\n throw new Error('given input could not be parsed correctly');\n }\n const root = rootEntry.cid;\n const { writer, out } = await car_1.CarWriter.create([root]);\n for await (const block of blockstore.blocks()) {\n writer.put(block);\n }\n writer.close();\n if (!userBlockstore) {\n await blockstore.close();\n }\n return { root, out };\n}\nexports.pack = pack;\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.unpackStream = exports.unpack = void 0;\nconst browser_readablestream_to_it_1 = __importDefault(require(\"browser-readablestream-to-it\"));\nconst iterator_1 = require(\"@ipld/car/iterator\");\nconst ipfs_unixfs_exporter_1 = __importDefault(require(\"ipfs-unixfs-exporter\"));\nconst verifying_get_only_blockstore_1 = require(\"./utils/verifying-get-only-blockstore\");\nconst memory_1 = require(\"../blockstore/memory\");\n// Export unixfs entries from car file\nasync function* unpack(carReader, roots) {\n const verifyingBlockService = verifying_get_only_blockstore_1.VerifyingGetOnlyBlockStore.fromCarReader(carReader);\n if (!roots || roots.length === 0) {\n roots = await carReader.getRoots();\n }\n for (const root of roots) {\n yield* ipfs_unixfs_exporter_1.default.recursive(root, verifyingBlockService, { /* options */});\n }\n}\nexports.unpack = unpack;\nasync function* unpackStream(readable, { roots, blockstore: userBlockstore } = {}) {\n const carIterator = await iterator_1.CarBlockIterator.fromIterable(asAsyncIterable(readable));\n const blockstore = userBlockstore || new memory_1.MemoryBlockStore();\n for await (const block of carIterator) {\n await blockstore.put(block.cid, block.bytes);\n }\n const verifyingBlockStore = verifying_get_only_blockstore_1.VerifyingGetOnlyBlockStore.fromBlockstore(blockstore);\n if (!roots || roots.length === 0) {\n roots = await carIterator.getRoots();\n }\n for (const root of roots) {\n yield* ipfs_unixfs_exporter_1.default.recursive(root, verifyingBlockStore);\n }\n}\nexports.unpackStream = unpackStream;\n/**\n * Upgrade a ReadableStream to an AsyncIterable if it isn't already\n *\n * ReadableStream (e.g res.body) is asyncIterable in node, but not in chrome, yet.\n * see: https://bugs.chromium.org/p/chromium/issues/detail?id=929585\n */\nfunction asAsyncIterable(readable) {\n // @ts-ignore how to convince tsc that we are checking the type here?\n return Symbol.asyncIterator in readable ? readable : browser_readablestream_to_it_1.default(readable);\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.VerifyingGetOnlyBlockStore = void 0;\nconst uint8arrays_1 = require(\"uint8arrays\");\nconst sha2_1 = require(\"multiformats/hashes/sha2\");\nconst interface_blockstore_1 = require(\"interface-blockstore\");\nclass VerifyingGetOnlyBlockStore extends interface_blockstore_1.BlockstoreAdapter {\n constructor(blockstore) {\n super();\n this.store = blockstore;\n }\n async get(cid) {\n const res = await this.store.get(cid);\n if (!res) {\n throw new Error(`Incomplete CAR. Block missing for CID ${cid}`);\n }\n if (!isValid({ cid, bytes: res })) {\n throw new Error(`Invalid CAR. Hash of block data does not match CID ${cid}`);\n }\n return res;\n }\n static fromBlockstore(b) {\n return new VerifyingGetOnlyBlockStore(b);\n }\n static fromCarReader(cr) {\n return new VerifyingGetOnlyBlockStore({\n // Return bytes in the same fashion as a Blockstore implementation\n get: async (cid) => {\n const block = await cr.get(cid);\n return block === null || block === void 0 ? 
void 0 : block.bytes;\n }\n });\n }\n}\nexports.VerifyingGetOnlyBlockStore = VerifyingGetOnlyBlockStore;\nasync function isValid({ cid, bytes }) {\n const hash = await sha2_1.sha256.digest(bytes);\n return uint8arrays_1.equals(hash.digest, cid.multihash.digest);\n}\n","'use strict'\n\nconst normaliseContent = require('./normalise-content')\nconst normaliseInput = require('./normalise-input')\n\n/**\n * @typedef {import('ipfs-core-types/src/utils').ImportCandidateStream} ImportCandidateStream\n * @typedef {import('ipfs-unixfs-importer').ImportCandidate} ImportCandidate\n */\n\n/**\n * Transforms any of the `ipfs.add` input types into\n *\n * ```\n * AsyncIterable<{ path, mode, mtime, content: AsyncIterable }>\n * ```\n *\n * See https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options\n *\n * @param {ImportCandidateStream} input\n * @returns {AsyncGenerator}\n */\nmodule.exports = (input) => normaliseInput(input, normaliseContent)\n","'use strict'\n\nconst errCode = require('err-code')\nconst uint8ArrayFromString = require('uint8arrays/from-string')\nconst browserStreamToIt = require('browser-readablestream-to-it')\nconst blobToIt = require('blob-to-it')\nconst itPeekable = require('it-peekable')\nconst all = require('it-all')\nconst map = require('it-map')\nconst {\n isBytes,\n isReadableStream,\n isBlob\n} = require('./utils')\n\n/**\n * @param {import('./normalise-input').ToContent} input\n */\nasync function * toAsyncIterable (input) {\n // Bytes | String\n if (isBytes(input)) {\n yield toBytes(input)\n return\n }\n\n if (typeof input === 'string' || input instanceof String) {\n yield toBytes(input.toString())\n return\n }\n\n // Blob\n if (isBlob(input)) {\n yield * blobToIt(input)\n return\n }\n\n // Browser stream\n if (isReadableStream(input)) {\n input = browserStreamToIt(input)\n }\n\n // (Async)Iterator>\n if (Symbol.iterator in input || Symbol.asyncIterator in input) {\n /** @type {any} peekable */\n const peekable = itPeekable(input)\n\n /** @type {any} value */\n const { value, done } = await peekable.peek()\n\n if (done) {\n // make sure empty iterators result in empty files\n yield * []\n return\n }\n\n peekable.push(value)\n\n // (Async)Iterable\n if (Number.isInteger(value)) {\n yield Uint8Array.from((await all(peekable)))\n return\n }\n\n // (Async)Iterable\n if (isBytes(value) || typeof value === 'string' || value instanceof String) {\n yield * map(peekable, toBytes)\n return\n }\n }\n\n throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT')\n}\n\n/**\n * @param {ArrayBuffer | ArrayBufferView | string | InstanceType | number[]} chunk\n */\nfunction toBytes (chunk) {\n if (chunk instanceof Uint8Array) {\n return chunk\n }\n\n if (ArrayBuffer.isView(chunk)) {\n return new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength)\n }\n\n if (chunk instanceof ArrayBuffer) {\n return new Uint8Array(chunk)\n }\n\n if (Array.isArray(chunk)) {\n return Uint8Array.from(chunk)\n }\n\n return uint8ArrayFromString(chunk.toString())\n}\n\nmodule.exports = toAsyncIterable\n","'use strict'\n\nconst errCode = require('err-code')\nconst browserStreamToIt = require('browser-readablestream-to-it')\nconst itPeekable = require('it-peekable')\nconst map = require('it-map')\nconst {\n isBytes,\n isBlob,\n isReadableStream,\n isFileObject\n} = require('./utils')\nconst {\n parseMtime,\n parseMode\n} = require('ipfs-unixfs')\n\n/**\n * @typedef {import('ipfs-core-types/src/utils').ToContent} ToContent\n * @typedef 
{import('ipfs-unixfs-importer').ImportCandidate} ImporterImportCandidate\n * @typedef {import('ipfs-core-types/src/utils').ImportCandidate} ImportCandidate\n */\n\n/**\n * @param {import('ipfs-core-types/src/utils').ImportCandidateStream} input\n * @param {(content:ToContent) => AsyncIterable} normaliseContent\n */\n// eslint-disable-next-line complexity\nmodule.exports = async function * normaliseInput (input, normaliseContent) {\n if (input === null || input === undefined) {\n return\n }\n\n // String\n if (typeof input === 'string' || input instanceof String) {\n yield toFileObject(input.toString(), normaliseContent)\n return\n }\n\n // Uint8Array|ArrayBuffer|TypedArray\n // Blob|File\n if (isBytes(input) || isBlob(input)) {\n yield toFileObject(input, normaliseContent)\n return\n }\n\n // Browser ReadableStream\n if (isReadableStream(input)) {\n input = browserStreamToIt(input)\n }\n\n // Iterable>\n if (Symbol.iterator in input || Symbol.asyncIterator in input) {\n /** @type {any} peekable */\n const peekable = itPeekable(input)\n\n /** @type {any} value **/\n const { value, done } = await peekable.peek()\n\n if (done) {\n // make sure empty iterators result in empty files\n yield * []\n return\n }\n\n peekable.push(value)\n\n // (Async)Iterable\n // (Async)Iterable\n if (Number.isInteger(value) || isBytes(value)) {\n yield toFileObject(peekable, normaliseContent)\n return\n }\n\n // (Async)Iterable\n // (Async)Iterable\n // (Async)Iterable<{ path, content }>\n if (isFileObject(value) || isBlob(value) || typeof value === 'string' || value instanceof String) {\n yield * map(peekable, (/** @type {ImportCandidate} */ value) => toFileObject(value, normaliseContent))\n return\n }\n\n // (Async)Iterable<(Async)Iterable>>\n // (Async)Iterable>\n // ReadableStream<(Async)Iterable>>\n // ReadableStream>\n if (value[Symbol.iterator] || value[Symbol.asyncIterator] || isReadableStream(value)) {\n yield * map(peekable, (/** @type {ImportCandidate} */ value) => toFileObject(value, normaliseContent))\n return\n }\n }\n\n // { path, content: ? 
}\n // Note: Detected _after_ (Async)Iterable> because Node.js streams have a\n // `path` property that passes this check.\n if (isFileObject(input)) {\n yield toFileObject(input, normaliseContent)\n return\n }\n\n throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT')\n}\n\n/**\n * @param {ImportCandidate} input\n * @param {(content:ToContent) => AsyncIterable} normaliseContent\n */\nasync function toFileObject (input, normaliseContent) {\n // @ts-ignore - Those properties don't exist on most input types\n const { path, mode, mtime, content } = input\n\n /** @type {ImporterImportCandidate} */\n const file = {\n path: path || '',\n mode: parseMode(mode),\n mtime: parseMtime(mtime)\n }\n\n if (content) {\n file.content = await normaliseContent(content)\n } else if (!path) { // Not already a file object with path or content prop\n // @ts-ignore - input still can be different ToContent\n file.content = await normaliseContent(input)\n }\n\n return file\n}\n","'use strict'\n\nconst { Blob } = globalThis\n\n/**\n * @param {any} obj\n * @returns {obj is ArrayBufferView|ArrayBuffer}\n */\nfunction isBytes (obj) {\n return ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer\n}\n\n/**\n * @param {any} obj\n * @returns {obj is Blob}\n */\nfunction isBlob (obj) {\n return typeof Blob !== 'undefined' && obj instanceof Blob\n}\n\n/**\n * An object with a path or content property\n *\n * @param {any} obj\n * @returns {obj is import('ipfs-core-types/src/utils').ImportCandidate}\n */\nfunction isFileObject (obj) {\n return typeof obj === 'object' && (obj.path || obj.content)\n}\n\n/**\n * @param {any} value\n * @returns {value is ReadableStream}\n */\nconst isReadableStream = (value) =>\n value && typeof value.getReader === 'function'\n\nmodule.exports = {\n isBytes,\n isBlob,\n isFileObject,\n isReadableStream\n}\n","'use strict'\n\nconst {\n Data: PBData\n} = require('./unixfs')\nconst errcode = require('err-code')\n\n/**\n * @typedef {import('./types').Mtime} Mtime\n * @typedef {import('./types').MtimeLike} MtimeLike\n */\n\nconst types = [\n 'raw',\n 'directory',\n 'file',\n 'metadata',\n 'symlink',\n 'hamt-sharded-directory'\n]\n\nconst dirTypes = [\n 'directory',\n 'hamt-sharded-directory'\n]\n\nconst DEFAULT_FILE_MODE = parseInt('0644', 8)\nconst DEFAULT_DIRECTORY_MODE = parseInt('0755', 8)\n\n/**\n * @param {string | number | undefined} [mode]\n */\nfunction parseMode (mode) {\n if (mode == null) {\n return undefined\n }\n\n if (typeof mode === 'number') {\n return mode & 0xFFF\n }\n\n mode = mode.toString()\n\n if (mode.substring(0, 1) === '0') {\n // octal string\n return parseInt(mode, 8) & 0xFFF\n }\n\n // decimal string\n return parseInt(mode, 10) & 0xFFF\n}\n\n/**\n * @param {any} input\n */\nfunction parseMtime (input) {\n if (input == null) {\n return undefined\n }\n\n /** @type {Mtime | undefined} */\n let mtime\n\n // { secs, nsecs }\n if (input.secs != null) {\n mtime = {\n secs: input.secs,\n nsecs: input.nsecs\n }\n }\n\n // UnixFS TimeSpec\n if (input.Seconds != null) {\n mtime = {\n secs: input.Seconds,\n nsecs: input.FractionalNanoseconds\n }\n }\n\n // process.hrtime()\n if (Array.isArray(input)) {\n mtime = {\n secs: input[0],\n nsecs: input[1]\n }\n }\n\n // Javascript Date\n if (input instanceof Date) {\n const ms = input.getTime()\n const secs = Math.floor(ms / 1000)\n\n mtime = {\n secs: secs,\n nsecs: (ms - (secs * 1000)) * 1000\n }\n }\n\n /*\n TODO: https://github.com/ipfs/aegir/issues/487\n\n // process.hrtime.bigint()\n if (input 
instanceof BigInt) {\n const secs = input / BigInt(1e9)\n const nsecs = input - (secs * BigInt(1e9))\n\n mtime = {\n secs: parseInt(secs.toString()),\n nsecs: parseInt(nsecs.toString())\n }\n }\n */\n\n if (!Object.prototype.hasOwnProperty.call(mtime, 'secs')) {\n return undefined\n }\n\n if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) {\n throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS')\n }\n\n return mtime\n}\n\nclass Data {\n /**\n * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md\n *\n * @param {Uint8Array} marshaled\n */\n static unmarshal (marshaled) {\n const message = PBData.decode(marshaled)\n const decoded = PBData.toObject(message, {\n defaults: false,\n arrays: true,\n longs: Number,\n objects: false\n })\n\n const data = new Data({\n type: types[decoded.Type],\n data: decoded.Data,\n blockSizes: decoded.blocksizes,\n mode: decoded.mode,\n mtime: decoded.mtime\n ? {\n secs: decoded.mtime.Seconds,\n nsecs: decoded.mtime.FractionalNanoseconds\n }\n : undefined\n })\n\n // make sure we honour the original mode\n data._originalMode = decoded.mode || 0\n\n return data\n }\n\n /**\n * @param {object} [options]\n * @param {string} [options.type='file']\n * @param {Uint8Array} [options.data]\n * @param {number[]} [options.blockSizes]\n * @param {number} [options.hashType]\n * @param {number} [options.fanout]\n * @param {MtimeLike | null} [options.mtime]\n * @param {number | string} [options.mode]\n */\n constructor (options = {\n type: 'file'\n }) {\n const {\n type,\n data,\n blockSizes,\n hashType,\n fanout,\n mtime,\n mode\n } = options\n\n if (type && !types.includes(type)) {\n throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')\n }\n\n this.type = type || 'file'\n this.data = data\n this.hashType = hashType\n this.fanout = fanout\n\n /** @type {number[]} */\n this.blockSizes = blockSizes || []\n this._originalMode = 0\n this.mode = parseMode(mode)\n\n if (mtime) {\n this.mtime = parseMtime(mtime)\n\n if (this.mtime && !this.mtime.nsecs) {\n this.mtime.nsecs = 0\n }\n }\n }\n\n /**\n * @param {number | undefined} mode\n */\n set mode (mode) {\n this._mode = this.isDirectory() ? 
DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE\n\n const parsedMode = parseMode(mode)\n\n if (parsedMode !== undefined) {\n this._mode = parsedMode\n }\n }\n\n /**\n * @returns {number | undefined}\n */\n get mode () {\n return this._mode\n }\n\n isDirectory () {\n return Boolean(this.type && dirTypes.includes(this.type))\n }\n\n /**\n * @param {number} size\n */\n addBlockSize (size) {\n this.blockSizes.push(size)\n }\n\n /**\n * @param {number} index\n */\n removeBlockSize (index) {\n this.blockSizes.splice(index, 1)\n }\n\n /**\n * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else\n */\n fileSize () {\n if (this.isDirectory()) {\n // dirs don't have file size\n return 0\n }\n\n let sum = 0\n this.blockSizes.forEach((size) => {\n sum += size\n })\n\n if (this.data) {\n sum += this.data.length\n }\n\n return sum\n }\n\n /**\n * encode to protobuf Uint8Array\n */\n marshal () {\n let type\n\n switch (this.type) {\n case 'raw': type = PBData.DataType.Raw; break\n case 'directory': type = PBData.DataType.Directory; break\n case 'file': type = PBData.DataType.File; break\n case 'metadata': type = PBData.DataType.Metadata; break\n case 'symlink': type = PBData.DataType.Symlink; break\n case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break\n default:\n throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')\n }\n\n let data = this.data\n\n if (!this.data || !this.data.length) {\n data = undefined\n }\n\n let mode\n\n if (this.mode != null) {\n mode = (this._originalMode & 0xFFFFF000) | (parseMode(this.mode) || 0)\n\n if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {\n mode = undefined\n }\n\n if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {\n mode = undefined\n }\n }\n\n let mtime\n\n if (this.mtime != null) {\n const parsed = parseMtime(this.mtime)\n\n if (parsed) {\n mtime = {\n Seconds: parsed.secs,\n FractionalNanoseconds: parsed.nsecs\n }\n\n if (mtime.FractionalNanoseconds === 0) {\n delete mtime.FractionalNanoseconds\n }\n }\n }\n\n const pbData = {\n Type: type,\n Data: data,\n filesize: this.isDirectory() ? 
undefined : this.fileSize(),\n blocksizes: this.blockSizes,\n hashType: this.hashType,\n fanout: this.fanout,\n mode,\n mtime\n }\n\n return PBData.encode(pbData).finish()\n }\n}\n\nmodule.exports = {\n UnixFS: Data,\n parseMode,\n parseMtime\n}\n","/*eslint-disable*/\n\"use strict\";\n\nvar $protobuf = require(\"protobufjs/minimal\");\n\n// Common aliases\nvar $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;\n\n// Exported root namespace\nvar $root = $protobuf.roots[\"ipfs-unixfs\"] || ($protobuf.roots[\"ipfs-unixfs\"] = {});\n\n$root.Data = (function() {\n\n /**\n * Properties of a Data.\n * @exports IData\n * @interface IData\n * @property {Data.DataType} Type Data Type\n * @property {Uint8Array|null} [Data] Data Data\n * @property {number|null} [filesize] Data filesize\n * @property {Array.|null} [blocksizes] Data blocksizes\n * @property {number|null} [hashType] Data hashType\n * @property {number|null} [fanout] Data fanout\n * @property {number|null} [mode] Data mode\n * @property {IUnixTime|null} [mtime] Data mtime\n */\n\n /**\n * Constructs a new Data.\n * @exports Data\n * @classdesc Represents a Data.\n * @implements IData\n * @constructor\n * @param {IData=} [p] Properties to set\n */\n function Data(p) {\n this.blocksizes = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * Data Type.\n * @member {Data.DataType} Type\n * @memberof Data\n * @instance\n */\n Data.prototype.Type = 0;\n\n /**\n * Data Data.\n * @member {Uint8Array} Data\n * @memberof Data\n * @instance\n */\n Data.prototype.Data = $util.newBuffer([]);\n\n /**\n * Data filesize.\n * @member {number} filesize\n * @memberof Data\n * @instance\n */\n Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data blocksizes.\n * @member {Array.} blocksizes\n * @memberof Data\n * @instance\n */\n Data.prototype.blocksizes = $util.emptyArray;\n\n /**\n * Data hashType.\n * @member {number} hashType\n * @memberof Data\n * @instance\n */\n Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data fanout.\n * @member {number} fanout\n * @memberof Data\n * @instance\n */\n Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data mode.\n * @member {number} mode\n * @memberof Data\n * @instance\n */\n Data.prototype.mode = 0;\n\n /**\n * Data mtime.\n * @member {IUnixTime|null|undefined} mtime\n * @memberof Data\n * @instance\n */\n Data.prototype.mtime = null;\n\n /**\n * Encodes the specified Data message. 
Does not implicitly {@link Data.verify|verify} messages.\n * @function encode\n * @memberof Data\n * @static\n * @param {IData} m Data message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Data.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n w.uint32(8).int32(m.Type);\n if (m.Data != null && Object.hasOwnProperty.call(m, \"Data\"))\n w.uint32(18).bytes(m.Data);\n if (m.filesize != null && Object.hasOwnProperty.call(m, \"filesize\"))\n w.uint32(24).uint64(m.filesize);\n if (m.blocksizes != null && m.blocksizes.length) {\n for (var i = 0; i < m.blocksizes.length; ++i)\n w.uint32(32).uint64(m.blocksizes[i]);\n }\n if (m.hashType != null && Object.hasOwnProperty.call(m, \"hashType\"))\n w.uint32(40).uint64(m.hashType);\n if (m.fanout != null && Object.hasOwnProperty.call(m, \"fanout\"))\n w.uint32(48).uint64(m.fanout);\n if (m.mode != null && Object.hasOwnProperty.call(m, \"mode\"))\n w.uint32(56).uint32(m.mode);\n if (m.mtime != null && Object.hasOwnProperty.call(m, \"mtime\"))\n $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim();\n return w;\n };\n\n /**\n * Decodes a Data message from the specified reader or buffer.\n * @function decode\n * @memberof Data\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {Data} Data\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Data.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.Data();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.Type = r.int32();\n break;\n case 2:\n m.Data = r.bytes();\n break;\n case 3:\n m.filesize = r.uint64();\n break;\n case 4:\n if (!(m.blocksizes && m.blocksizes.length))\n m.blocksizes = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.blocksizes.push(r.uint64());\n } else\n m.blocksizes.push(r.uint64());\n break;\n case 5:\n m.hashType = r.uint64();\n break;\n case 6:\n m.fanout = r.uint64();\n break;\n case 7:\n m.mode = r.uint32();\n break;\n case 8:\n m.mtime = $root.UnixTime.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n if (!m.hasOwnProperty(\"Type\"))\n throw $util.ProtocolError(\"missing required 'Type'\", { instance: m });\n return m;\n };\n\n /**\n * Creates a Data message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof Data\n * @static\n * @param {Object.} d Plain object\n * @returns {Data} Data\n */\n Data.fromObject = function fromObject(d) {\n if (d instanceof $root.Data)\n return d;\n var m = new $root.Data();\n switch (d.Type) {\n case \"Raw\":\n case 0:\n m.Type = 0;\n break;\n case \"Directory\":\n case 1:\n m.Type = 1;\n break;\n case \"File\":\n case 2:\n m.Type = 2;\n break;\n case \"Metadata\":\n case 3:\n m.Type = 3;\n break;\n case \"Symlink\":\n case 4:\n m.Type = 4;\n break;\n case \"HAMTShard\":\n case 5:\n m.Type = 5;\n break;\n }\n if (d.Data != null) {\n if (typeof d.Data === \"string\")\n $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0);\n else if (d.Data.length)\n m.Data = d.Data;\n }\n if (d.filesize != null) {\n if ($util.Long)\n (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true;\n else if (typeof d.filesize === \"string\")\n m.filesize = parseInt(d.filesize, 10);\n else if (typeof d.filesize === \"number\")\n m.filesize = d.filesize;\n else if (typeof d.filesize === \"object\")\n m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true);\n }\n if (d.blocksizes) {\n if (!Array.isArray(d.blocksizes))\n throw TypeError(\".Data.blocksizes: array expected\");\n m.blocksizes = [];\n for (var i = 0; i < d.blocksizes.length; ++i) {\n if ($util.Long)\n (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true;\n else if (typeof d.blocksizes[i] === \"string\")\n m.blocksizes[i] = parseInt(d.blocksizes[i], 10);\n else if (typeof d.blocksizes[i] === \"number\")\n m.blocksizes[i] = d.blocksizes[i];\n else if (typeof d.blocksizes[i] === \"object\")\n m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true);\n }\n }\n if (d.hashType != null) {\n if ($util.Long)\n (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true;\n else if (typeof d.hashType === \"string\")\n m.hashType = parseInt(d.hashType, 10);\n else if (typeof d.hashType === \"number\")\n m.hashType = d.hashType;\n else if (typeof d.hashType === \"object\")\n m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true);\n }\n if (d.fanout != null) {\n if ($util.Long)\n (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true;\n else if (typeof d.fanout === \"string\")\n m.fanout = parseInt(d.fanout, 10);\n else if (typeof d.fanout === \"number\")\n m.fanout = d.fanout;\n else if (typeof d.fanout === \"object\")\n m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true);\n }\n if (d.mode != null) {\n m.mode = d.mode >>> 0;\n }\n if (d.mtime != null) {\n if (typeof d.mtime !== \"object\")\n throw TypeError(\".Data.mtime: object expected\");\n m.mtime = $root.UnixTime.fromObject(d.mtime);\n }\n return m;\n };\n\n /**\n * Creates a plain object from a Data message. Also converts values to other types if specified.\n * @function toObject\n * @memberof Data\n * @static\n * @param {Data} m Data\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n Data.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.arrays || o.defaults) {\n d.blocksizes = [];\n }\n if (o.defaults) {\n d.Type = o.enums === String ? 
\"Raw\" : 0;\n if (o.bytes === String)\n d.Data = \"\";\n else {\n d.Data = [];\n if (o.bytes !== Array)\n d.Data = $util.newBuffer(d.Data);\n }\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.filesize = o.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.hashType = o.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.fanout = o.longs === String ? \"0\" : 0;\n d.mode = 0;\n d.mtime = null;\n }\n if (m.Type != null && m.hasOwnProperty(\"Type\")) {\n d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type;\n }\n if (m.Data != null && m.hasOwnProperty(\"Data\")) {\n d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data;\n }\n if (m.filesize != null && m.hasOwnProperty(\"filesize\")) {\n if (typeof m.filesize === \"number\")\n d.filesize = o.longs === String ? String(m.filesize) : m.filesize;\n else\n d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize;\n }\n if (m.blocksizes && m.blocksizes.length) {\n d.blocksizes = [];\n for (var j = 0; j < m.blocksizes.length; ++j) {\n if (typeof m.blocksizes[j] === \"number\")\n d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j];\n else\n d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j];\n }\n }\n if (m.hashType != null && m.hasOwnProperty(\"hashType\")) {\n if (typeof m.hashType === \"number\")\n d.hashType = o.longs === String ? String(m.hashType) : m.hashType;\n else\n d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType;\n }\n if (m.fanout != null && m.hasOwnProperty(\"fanout\")) {\n if (typeof m.fanout === \"number\")\n d.fanout = o.longs === String ? String(m.fanout) : m.fanout;\n else\n d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? 
new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout;\n }\n if (m.mode != null && m.hasOwnProperty(\"mode\")) {\n d.mode = m.mode;\n }\n if (m.mtime != null && m.hasOwnProperty(\"mtime\")) {\n d.mtime = $root.UnixTime.toObject(m.mtime, o);\n }\n return d;\n };\n\n /**\n * Converts this Data to JSON.\n * @function toJSON\n * @memberof Data\n * @instance\n * @returns {Object.} JSON object\n */\n Data.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * DataType enum.\n * @name Data.DataType\n * @enum {number}\n * @property {number} Raw=0 Raw value\n * @property {number} Directory=1 Directory value\n * @property {number} File=2 File value\n * @property {number} Metadata=3 Metadata value\n * @property {number} Symlink=4 Symlink value\n * @property {number} HAMTShard=5 HAMTShard value\n */\n Data.DataType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"Raw\"] = 0;\n values[valuesById[1] = \"Directory\"] = 1;\n values[valuesById[2] = \"File\"] = 2;\n values[valuesById[3] = \"Metadata\"] = 3;\n values[valuesById[4] = \"Symlink\"] = 4;\n values[valuesById[5] = \"HAMTShard\"] = 5;\n return values;\n })();\n\n return Data;\n})();\n\n$root.UnixTime = (function() {\n\n /**\n * Properties of an UnixTime.\n * @exports IUnixTime\n * @interface IUnixTime\n * @property {number} Seconds UnixTime Seconds\n * @property {number|null} [FractionalNanoseconds] UnixTime FractionalNanoseconds\n */\n\n /**\n * Constructs a new UnixTime.\n * @exports UnixTime\n * @classdesc Represents an UnixTime.\n * @implements IUnixTime\n * @constructor\n * @param {IUnixTime=} [p] Properties to set\n */\n function UnixTime(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * UnixTime Seconds.\n * @member {number} Seconds\n * @memberof UnixTime\n * @instance\n */\n UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * UnixTime FractionalNanoseconds.\n * @member {number} FractionalNanoseconds\n * @memberof UnixTime\n * @instance\n */\n UnixTime.prototype.FractionalNanoseconds = 0;\n\n /**\n * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages.\n * @function encode\n * @memberof UnixTime\n * @static\n * @param {IUnixTime} m UnixTime message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n UnixTime.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n w.uint32(8).int64(m.Seconds);\n if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, \"FractionalNanoseconds\"))\n w.uint32(21).fixed32(m.FractionalNanoseconds);\n return w;\n };\n\n /**\n * Decodes an UnixTime message from the specified reader or buffer.\n * @function decode\n * @memberof UnixTime\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {UnixTime} UnixTime\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n UnixTime.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.UnixTime();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.Seconds = r.int64();\n break;\n case 2:\n m.FractionalNanoseconds = r.fixed32();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n if (!m.hasOwnProperty(\"Seconds\"))\n throw $util.ProtocolError(\"missing required 'Seconds'\", { instance: m });\n return m;\n };\n\n /**\n * Creates an UnixTime message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof UnixTime\n * @static\n * @param {Object.} d Plain object\n * @returns {UnixTime} UnixTime\n */\n UnixTime.fromObject = function fromObject(d) {\n if (d instanceof $root.UnixTime)\n return d;\n var m = new $root.UnixTime();\n if (d.Seconds != null) {\n if ($util.Long)\n (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false;\n else if (typeof d.Seconds === \"string\")\n m.Seconds = parseInt(d.Seconds, 10);\n else if (typeof d.Seconds === \"number\")\n m.Seconds = d.Seconds;\n else if (typeof d.Seconds === \"object\")\n m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber();\n }\n if (d.FractionalNanoseconds != null) {\n m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0;\n }\n return m;\n };\n\n /**\n * Creates a plain object from an UnixTime message. Also converts values to other types if specified.\n * @function toObject\n * @memberof UnixTime\n * @static\n * @param {UnixTime} m UnixTime\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n UnixTime.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.defaults) {\n if ($util.Long) {\n var n = new $util.Long(0, 0, false);\n d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.Seconds = o.longs === String ? \"0\" : 0;\n d.FractionalNanoseconds = 0;\n }\n if (m.Seconds != null && m.hasOwnProperty(\"Seconds\")) {\n if (typeof m.Seconds === \"number\")\n d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds;\n else\n d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds;\n }\n if (m.FractionalNanoseconds != null && m.hasOwnProperty(\"FractionalNanoseconds\")) {\n d.FractionalNanoseconds = m.FractionalNanoseconds;\n }\n return d;\n };\n\n /**\n * Converts this UnixTime to JSON.\n * @function toJSON\n * @memberof UnixTime\n * @instance\n * @returns {Object.} JSON object\n */\n UnixTime.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n return UnixTime;\n})();\n\n$root.Metadata = (function() {\n\n /**\n * Properties of a Metadata.\n * @exports IMetadata\n * @interface IMetadata\n * @property {string|null} [MimeType] Metadata MimeType\n */\n\n /**\n * Constructs a new Metadata.\n * @exports Metadata\n * @classdesc Represents a Metadata.\n * @implements IMetadata\n * @constructor\n * @param {IMetadata=} [p] Properties to set\n */\n function Metadata(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * Metadata MimeType.\n * @member {string} MimeType\n * @memberof Metadata\n * @instance\n */\n Metadata.prototype.MimeType = \"\";\n\n /**\n * Encodes the specified Metadata message. 
Does not implicitly {@link Metadata.verify|verify} messages.\n * @function encode\n * @memberof Metadata\n * @static\n * @param {IMetadata} m Metadata message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Metadata.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n if (m.MimeType != null && Object.hasOwnProperty.call(m, \"MimeType\"))\n w.uint32(10).string(m.MimeType);\n return w;\n };\n\n /**\n * Decodes a Metadata message from the specified reader or buffer.\n * @function decode\n * @memberof Metadata\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {Metadata} Metadata\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Metadata.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.MimeType = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n /**\n * Creates a Metadata message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof Metadata\n * @static\n * @param {Object.} d Plain object\n * @returns {Metadata} Metadata\n */\n Metadata.fromObject = function fromObject(d) {\n if (d instanceof $root.Metadata)\n return d;\n var m = new $root.Metadata();\n if (d.MimeType != null) {\n m.MimeType = String(d.MimeType);\n }\n return m;\n };\n\n /**\n * Creates a plain object from a Metadata message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof Metadata\n * @static\n * @param {Metadata} m Metadata\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n Metadata.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.defaults) {\n d.MimeType = \"\";\n }\n if (m.MimeType != null && m.hasOwnProperty(\"MimeType\")) {\n d.MimeType = m.MimeType;\n }\n return d;\n };\n\n /**\n * Converts this Metadata to JSON.\n * @function toJSON\n * @memberof Metadata\n * @instance\n * @returns {Object.} JSON object\n */\n Metadata.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n return Metadata;\n})();\n\nmodule.exports = $root;\n","'use strict'\n\nconst errCode = require('err-code')\nconst { CID } = require('multiformats/cid')\nconst resolve = require('./resolvers')\nconst last = require('it-last')\n\n/**\n * @typedef {import('ipfs-unixfs').UnixFS} UnixFS\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('./types').ExporterOptions} ExporterOptions\n * @typedef {import('./types').UnixFSFile} UnixFSFile\n * @typedef {import('./types').UnixFSDirectory} UnixFSDirectory\n * @typedef {import('./types').ObjectNode} ObjectNode\n * @typedef {import('./types').RawNode} RawNode\n * @typedef {import('./types').IdentityNode} IdentityNode\n * @typedef {import('./types').UnixFSEntry} UnixFSEntry\n */\n\nconst toPathComponents = (path = '') => {\n // split on / unless escaped with \\\n return (path\n .trim()\n .match(/([^\\\\^/]|\\\\\\/)+/g) || [])\n .filter(Boolean)\n}\n\n/**\n * @param {string|Uint8Array|CID} path\n */\nconst cidAndRest = (path) => {\n if (path instanceof Uint8Array) {\n return {\n cid: CID.decode(path),\n toResolve: []\n }\n }\n\n const cid = CID.asCID(path)\n if (cid) {\n return {\n cid,\n toResolve: []\n }\n }\n\n if (typeof path === 'string') {\n if (path.indexOf('/ipfs/') === 0) {\n path = path.substring(6)\n }\n\n const output = toPathComponents(path)\n\n return {\n cid: CID.parse(output[0]),\n toResolve: output.slice(1)\n }\n }\n\n throw errCode(new Error(`Unknown path type ${path}`), 'ERR_BAD_PATH')\n}\n\n/**\n * @param {string | CID} path\n * @param {Blockstore} blockstore\n * @param {ExporterOptions} [options]\n */\nasync function * walkPath (path, blockstore, options = {}) {\n let {\n cid,\n toResolve\n } = cidAndRest(path)\n let name = cid.toString()\n let entryPath = name\n const startingDepth = toResolve.length\n\n while (true) {\n const result = await resolve(cid, name, entryPath, toResolve, startingDepth, blockstore, options)\n\n if (!result.entry && !result.next) {\n throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND')\n }\n\n if (result.entry) {\n yield result.entry\n }\n\n if (!result.next) {\n return\n }\n\n // resolve further parts\n toResolve = result.next.toResolve\n cid = result.next.cid\n name = result.next.name\n entryPath = result.next.path\n }\n}\n\n/**\n * @param {string | CID} path\n * @param {Blockstore} blockstore\n * @param {ExporterOptions} [options]\n */\nasync function exporter (path, blockstore, options = {}) {\n const result = await last(walkPath(path, blockstore, options))\n\n if (!result) {\n throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND')\n }\n\n return result\n}\n\n/**\n * @param {string | CID} path\n * @param {Blockstore} blockstore\n * @param {ExporterOptions} [options]\n */\nasync function * 
recursive (path, blockstore, options = {}) {\n const node = await exporter(path, blockstore, options)\n\n if (!node) {\n return\n }\n\n yield node\n\n if (node.type === 'directory') {\n for await (const child of recurse(node, options)) {\n yield child\n }\n }\n\n /**\n * @param {UnixFSDirectory} node\n * @param {ExporterOptions} options\n * @returns {AsyncGenerator}\n */\n async function * recurse (node, options) {\n for await (const file of node.content(options)) {\n yield file\n\n if (file instanceof Uint8Array) {\n continue\n }\n\n if (file.type === 'directory') {\n yield * recurse(file, options)\n }\n }\n }\n}\n\nmodule.exports = {\n exporter,\n walkPath,\n recursive\n}\n","'use strict'\n\nconst { CID } = require('multiformats/cid')\nconst errCode = require('err-code')\nconst dagCbor = require('@ipld/dag-cbor')\n\n/**\n * @typedef {import('../types').Resolver} Resolver\n */\n\n/**\n * @type {Resolver}\n */\nconst resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {\n const block = await blockstore.get(cid)\n const object = dagCbor.decode(block)\n let subObject = object\n let subPath = path\n\n while (toResolve.length) {\n const prop = toResolve[0]\n\n if (prop in subObject) {\n // remove the bit of the path we have resolved\n toResolve.shift()\n subPath = `${subPath}/${prop}`\n\n const subObjectCid = CID.asCID(subObject[prop])\n if (subObjectCid) {\n return {\n entry: {\n type: 'object',\n name,\n path,\n cid,\n node: block,\n depth,\n size: block.length,\n content: async function * () {\n yield object\n }\n },\n next: {\n cid: subObjectCid,\n name: prop,\n path: subPath,\n toResolve\n }\n }\n }\n\n subObject = subObject[prop]\n } else {\n // cannot resolve further\n throw errCode(new Error(`No property named ${prop} found in cbor node ${cid}`), 'ERR_NO_PROP')\n }\n }\n\n return {\n entry: {\n type: 'object',\n name,\n path,\n cid,\n node: block,\n depth,\n size: block.length,\n content: async function * () {\n yield object\n }\n }\n }\n}\n\nmodule.exports = resolve\n","'use strict'\n\nconst errCode = require('err-code')\nconst extractDataFromBlock = require('../utils/extract-data-from-block')\nconst validateOffsetAndLength = require('../utils/validate-offset-and-length')\nconst mh = require('multiformats/hashes/digest')\n\n/**\n * @typedef {import('../types').ExporterOptions} ExporterOptions\n * @typedef {import('../types').Resolver} Resolver\n */\n\n/**\n * @param {Uint8Array} node\n */\nconst rawContent = (node) => {\n /**\n * @param {ExporterOptions} options\n */\n async function * contentGenerator (options = {}) {\n const {\n offset,\n length\n } = validateOffsetAndLength(node.length, options.offset, options.length)\n\n yield extractDataFromBlock(node, 0, offset, offset + length)\n }\n\n return contentGenerator\n}\n\n/**\n * @type {Resolver}\n */\nconst resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {\n if (toResolve.length) {\n throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND')\n }\n const buf = await mh.decode(cid.multihash.bytes)\n\n return {\n entry: {\n type: 'identity',\n name,\n path,\n cid,\n content: rawContent(buf.digest),\n depth,\n size: buf.digest.length,\n node: buf.digest\n }\n }\n}\n\nmodule.exports = resolve\n","'use strict'\n\nconst errCode = require('err-code')\n\nconst dagPb = require('@ipld/dag-pb')\nconst dagCbor = require('@ipld/dag-cbor')\nconst raw = require('multiformats/codecs/raw')\nconst { identity } = 
require('multiformats/hashes/identity')\n\n/**\n * @typedef {import('../types').Resolver} Resolver\n * @typedef {import('../types').Resolve} Resolve\n */\n\n/**\n * @type {{ [ key: string ]: Resolver }}\n */\nconst resolvers = {\n [dagPb.code]: require('./unixfs-v1'),\n [raw.code]: require('./raw'),\n [dagCbor.code]: require('./dag-cbor'),\n [identity.code]: require('./identity')\n}\n\n/**\n * @type {Resolve}\n */\nfunction resolve (cid, name, path, toResolve, depth, blockstore, options) {\n const resolver = resolvers[cid.code]\n\n if (!resolver) {\n throw errCode(new Error(`No resolver for code ${cid.code}`), 'ERR_NO_RESOLVER')\n }\n\n return resolver(cid, name, path, toResolve, resolve, depth, blockstore, options)\n}\n\nmodule.exports = resolve\n","'use strict'\n\nconst errCode = require('err-code')\nconst extractDataFromBlock = require('../utils/extract-data-from-block')\nconst validateOffsetAndLength = require('../utils/validate-offset-and-length')\n\n/**\n * @typedef {import('../types').ExporterOptions} ExporterOptions\n */\n\n/**\n * @param {Uint8Array} node\n */\nconst rawContent = (node) => {\n /**\n * @param {ExporterOptions} options\n */\n async function * contentGenerator (options = {}) {\n const {\n offset,\n length\n } = validateOffsetAndLength(node.length, options.offset, options.length)\n\n yield extractDataFromBlock(node, 0, offset, offset + length)\n }\n\n return contentGenerator\n}\n\n/**\n * @type {import('../types').Resolver}\n */\nconst resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {\n if (toResolve.length) {\n throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND')\n }\n\n const block = await blockstore.get(cid, options)\n\n return {\n entry: {\n type: 'raw',\n name,\n path,\n cid,\n content: rawContent(block),\n depth,\n size: block.length,\n node: block\n }\n }\n}\n\nmodule.exports = resolve\n","'use strict'\n\n/**\n * @typedef {import('../../../types').ExporterOptions} ExporterOptions\n * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent\n * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver\n */\n\n/**\n * @type {UnixfsV1Resolver}\n */\nconst directoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {\n /**\n * @param {ExporterOptions} [options]\n * @returns {UnixfsV1DirectoryContent}\n */\n async function * yieldDirectoryContent (options = {}) {\n const offset = options.offset || 0\n const length = options.length || node.Links.length\n const links = node.Links.slice(offset, length)\n\n for (const link of links) {\n const result = await resolve(link.Hash, link.Name || '', `${path}/${link.Name || ''}`, [], depth + 1, blockstore, options)\n\n if (result.entry) {\n yield result.entry\n }\n }\n }\n\n return yieldDirectoryContent\n}\n\nmodule.exports = directoryContent\n","'use strict'\n\nconst extractDataFromBlock = require('../../../utils/extract-data-from-block')\nconst validateOffsetAndLength = require('../../../utils/validate-offset-and-length')\nconst { UnixFS } = require('ipfs-unixfs')\nconst errCode = require('err-code')\nconst dagPb = require('@ipld/dag-pb')\nconst dagCbor = require('@ipld/dag-cbor')\nconst raw = require('multiformats/codecs/raw')\n\n/**\n * @typedef {import('../../../types').ExporterOptions} ExporterOptions\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('@ipld/dag-pb').PBNode} PBNode\n *\n * @param {Blockstore} blockstore\n * @param {PBNode} node\n * 
@param {number} start\n * @param {number} end\n * @param {number} streamPosition\n * @param {ExporterOptions} options\n * @returns {AsyncIterable}\n */\nasync function * emitBytes (blockstore, node, start, end, streamPosition = 0, options) {\n // a `raw` node\n if (node instanceof Uint8Array) {\n const buf = extractDataFromBlock(node, streamPosition, start, end)\n\n if (buf.length) {\n yield buf\n }\n\n streamPosition += buf.length\n\n return streamPosition\n }\n\n if (node.Data == null) {\n throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS')\n }\n\n let file\n\n try {\n file = UnixFS.unmarshal(node.Data)\n } catch (err) {\n throw errCode(err, 'ERR_NOT_UNIXFS')\n }\n\n // might be a unixfs `raw` node or have data on intermediate nodes\n if (file.data && file.data.length) {\n const buf = extractDataFromBlock(file.data, streamPosition, start, end)\n\n if (buf.length) {\n yield buf\n }\n\n streamPosition += file.data.length\n }\n\n let childStart = streamPosition\n\n // work out which child nodes contain the requested data\n for (let i = 0; i < node.Links.length; i++) {\n const childLink = node.Links[i]\n const childEnd = streamPosition + file.blockSizes[i]\n\n if ((start >= childStart && start < childEnd) || // child has offset byte\n (end > childStart && end <= childEnd) || // child has end byte\n (start < childStart && end > childEnd)) { // child is between offset and end bytes\n const block = await blockstore.get(childLink.Hash, {\n signal: options.signal\n })\n let child\n switch (childLink.Hash.code) {\n case dagPb.code:\n child = await dagPb.decode(block)\n break\n case raw.code:\n child = block\n break\n case dagCbor.code:\n child = await dagCbor.decode(block)\n break\n default:\n throw Error(`Unsupported codec: ${childLink.Hash.code}`)\n }\n\n for await (const buf of emitBytes(blockstore, child, start, end, streamPosition, options)) {\n streamPosition += buf.length\n\n yield buf\n }\n }\n\n streamPosition = childEnd\n childStart = childEnd + 1\n }\n}\n\n/**\n * @type {import('../').UnixfsV1Resolver}\n */\nconst fileContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {\n /**\n * @param {ExporterOptions} options\n */\n function yieldFileContent (options = {}) {\n const fileSize = unixfs.fileSize()\n\n if (fileSize === undefined) {\n throw new Error('File was a directory')\n }\n\n const {\n offset,\n length\n } = validateOffsetAndLength(fileSize, options.offset, options.length)\n\n const start = offset\n const end = offset + length\n\n return emitBytes(blockstore, node, start, end, 0, options)\n }\n\n return yieldFileContent\n}\n\nmodule.exports = fileContent\n","'use strict'\n\nconst { decode } = require('@ipld/dag-pb')\n\n/**\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('../../../types').ExporterOptions} ExporterOptions\n * @typedef {import('../../../types').Resolve} Resolve\n * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent\n * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver\n * @typedef {import('@ipld/dag-pb').PBNode} PBNode\n */\n\n/**\n * @type {UnixfsV1Resolver}\n */\nconst hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {\n /**\n * @param {ExporterOptions} options\n *\n */\n function yieldHamtDirectoryContent (options = {}) {\n return listDirectory(node, path, resolve, depth, blockstore, options)\n }\n\n return yieldHamtDirectoryContent\n}\n\n/**\n * @param {PBNode} node\n * @param {string} path\n * @param 
{Resolve} resolve\n * @param {number} depth\n * @param {Blockstore} blockstore\n * @param {ExporterOptions} options\n *\n * @returns {UnixfsV1DirectoryContent}\n */\nasync function * listDirectory (node, path, resolve, depth, blockstore, options) {\n const links = node.Links\n\n for (const link of links) {\n const name = link.Name != null ? link.Name.substring(2) : null\n\n if (name) {\n const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockstore, options)\n\n yield result.entry\n } else {\n // descend into subshard\n const block = await blockstore.get(link.Hash)\n node = decode(block)\n\n for await (const file of listDirectory(node, path, resolve, depth, blockstore, options)) {\n yield file\n }\n }\n }\n}\n\nmodule.exports = hamtShardedDirectoryContent\n","'use strict'\n\nconst errCode = require('err-code')\nconst { UnixFS } = require('ipfs-unixfs')\nconst findShardCid = require('../../utils/find-cid-in-shard')\nconst { decode } = require('@ipld/dag-pb')\n\n/**\n * @typedef {import('../../types').Resolve} Resolve\n * @typedef {import('../../types').Resolver} Resolver\n * @typedef {import('../../types').UnixfsV1Resolver} UnixfsV1Resolver\n * @typedef {import('@ipld/dag-pb').PBNode} PBNode\n */\n\n/**\n * @param {PBNode} node\n * @param {string} name\n */\nconst findLinkCid = (node, name) => {\n const link = node.Links.find(link => link.Name === name)\n\n return link && link.Hash\n}\n\n/**\n * @type {{ [key: string]: UnixfsV1Resolver }}\n */\nconst contentExporters = {\n raw: require('./content/file'),\n file: require('./content/file'),\n directory: require('./content/directory'),\n 'hamt-sharded-directory': require('./content/hamt-sharded-directory'),\n metadata: (cid, node, unixfs, path, resolve, depth, blockstore) => {\n return () => []\n },\n symlink: (cid, node, unixfs, path, resolve, depth, blockstore) => {\n return () => []\n }\n}\n\n/**\n * @type {Resolver}\n */\nconst unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {\n const block = await blockstore.get(cid, options)\n const node = decode(block)\n let unixfs\n let next\n\n if (!name) {\n name = cid.toString()\n }\n\n if (node.Data == null) {\n throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS')\n }\n\n try {\n unixfs = UnixFS.unmarshal(node.Data)\n } catch (err) {\n // non-UnixFS dag-pb node? It could happen.\n throw errCode(err, 'ERR_NOT_UNIXFS')\n }\n\n if (!path) {\n path = name\n }\n\n if (toResolve.length) {\n let linkCid\n\n if (unixfs && unixfs.type === 'hamt-sharded-directory') {\n // special case - unixfs v1 hamt shards\n linkCid = await findShardCid(node, toResolve[0], blockstore)\n } else {\n linkCid = findLinkCid(node, toResolve[0])\n }\n\n if (!linkCid) {\n throw errCode(new Error('file does not exist'), 'ERR_NOT_FOUND')\n }\n\n // remove the path component we have resolved\n const nextName = toResolve.shift()\n const nextPath = `${path}/${nextName}`\n\n next = {\n cid: linkCid,\n toResolve,\n name: nextName || '',\n path: nextPath\n }\n }\n\n return {\n entry: {\n type: unixfs.isDirectory() ? 
'directory' : 'file',\n name,\n path,\n cid,\n // @ts-ignore\n content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockstore),\n unixfs,\n depth,\n node,\n size: unixfs.fileSize()\n },\n next\n }\n}\n\nmodule.exports = unixFsResolver\n","'use strict'\n\n/**\n * @param {Uint8Array} block\n * @param {number} blockStart\n * @param {number} requestedStart\n * @param {number} requestedEnd\n */\nmodule.exports = function extractDataFromBlock (block, blockStart, requestedStart, requestedEnd) {\n const blockLength = block.length\n const blockEnd = blockStart + blockLength\n\n if (requestedStart >= blockEnd || requestedEnd < blockStart) {\n // If we are looking for a byte range that is starts after the start of the block,\n // return an empty block. This can happen when internal nodes contain data\n return new Uint8Array(0)\n }\n\n if (requestedEnd >= blockStart && requestedEnd < blockEnd) {\n // If the end byte is in the current block, truncate the block to the end byte\n block = block.slice(0, requestedEnd - blockStart)\n }\n\n if (requestedStart >= blockStart && requestedStart < blockEnd) {\n // If the start byte is in the current block, skip to the start byte\n block = block.slice(requestedStart - blockStart)\n }\n\n return block\n}\n","'use strict'\n\nconst { Bucket, createHAMT } = require('hamt-sharding')\nconst { decode } = require('@ipld/dag-pb')\n// @ts-ignore - no types available\nconst mur = require('murmurhash3js-revisited')\nconst uint8ArrayFromString = require('uint8arrays/from-string')\n\n/**\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('multiformats/cid').CID} CID\n * @typedef {import('../types').ExporterOptions} ExporterOptions\n * @typedef {import('@ipld/dag-pb').PBNode} PBNode\n * @typedef {import('@ipld/dag-pb').PBLink} PBLink\n */\n\n// FIXME: this is copy/pasted from ipfs-unixfs-importer/src/options.js\n/**\n * @param {Uint8Array} buf\n */\nconst hashFn = async function (buf) {\n return uint8ArrayFromString(mur.x64.hash128(buf), 'base16').slice(0, 8).reverse()\n}\n\n/**\n * @param {PBLink[]} links\n * @param {Bucket} bucket\n * @param {Bucket} rootBucket\n */\nconst addLinksToHamtBucket = (links, bucket, rootBucket) => {\n return Promise.all(\n links.map(link => {\n if (link.Name == null) {\n // TODO(@rvagg): what do? 
this is technically possible\n throw new Error('Unexpected Link without a Name')\n }\n if (link.Name.length === 2) {\n const pos = parseInt(link.Name, 16)\n\n return bucket._putObjectAt(pos, new Bucket({\n hash: rootBucket._options.hash,\n bits: rootBucket._options.bits\n }, bucket, pos))\n }\n\n return rootBucket.put(link.Name.substring(2), true)\n })\n )\n}\n\n/**\n * @param {number} position\n */\nconst toPrefix = (position) => {\n return position\n .toString(16)\n .toUpperCase()\n .padStart(2, '0')\n .substring(0, 2)\n}\n\n/**\n * @param {import('hamt-sharding').Bucket.BucketPosition<boolean>} position\n */\nconst toBucketPath = (position) => {\n let bucket = position.bucket\n const path = []\n\n while (bucket._parent) {\n path.push(bucket)\n\n bucket = bucket._parent\n }\n\n path.push(bucket)\n\n return path.reverse()\n}\n\n/**\n * @typedef {object} ShardTraversalContext\n * @property {number} hamtDepth\n * @property {Bucket<boolean>} rootBucket\n * @property {Bucket<boolean>} lastBucket\n *\n * @param {PBNode} node\n * @param {string} name\n * @param {Blockstore} blockstore\n * @param {ShardTraversalContext} [context]\n * @param {ExporterOptions} [options]\n * @returns {Promise<CID | null>}\n */\nconst findShardCid = async (node, name, blockstore, context, options) => {\n if (!context) {\n const rootBucket = createHAMT({\n hashFn\n })\n\n context = {\n rootBucket,\n hamtDepth: 1,\n lastBucket: rootBucket\n }\n }\n\n await addLinksToHamtBucket(node.Links, context.lastBucket, context.rootBucket)\n\n const position = await context.rootBucket._findNewBucketAndPos(name)\n let prefix = toPrefix(position.pos)\n const bucketPath = toBucketPath(position)\n\n if (bucketPath.length > context.hamtDepth) {\n context.lastBucket = bucketPath[context.hamtDepth]\n\n prefix = toPrefix(context.lastBucket._posAtParent)\n }\n\n const link = node.Links.find(link => {\n if (link.Name == null) {\n return false\n }\n\n const entryPrefix = link.Name.substring(0, 2)\n const entryName = link.Name.substring(2)\n\n if (entryPrefix !== prefix) {\n // not the entry or subshard we're looking for\n return false\n }\n\n if (entryName && entryName !== name) {\n // not the entry we're looking for\n return false\n }\n\n return true\n })\n\n if (!link) {\n return null\n }\n\n if (link.Name != null && link.Name.substring(2) === name) {\n return link.Hash\n }\n\n context.hamtDepth++\n\n const block = await blockstore.get(link.Hash, options)\n node = decode(block)\n\n return findShardCid(node, name, blockstore, context, options)\n}\n\nmodule.exports = findShardCid\n","'use strict'\n\nconst errCode = require('err-code')\n\n/**\n * @param {number} size\n * @param {number} [offset]\n * @param {number} [length]\n */\nconst validateOffsetAndLength = (size, offset, length) => {\n if (!offset) {\n offset = 0\n }\n\n if (offset < 0) {\n throw errCode(new Error('Offset must be greater than or equal to 0'), 'ERR_INVALID_PARAMS')\n }\n\n if (offset > size) {\n throw errCode(new Error('Offset must be less than the file size'), 'ERR_INVALID_PARAMS')\n }\n\n if (!length && length !== 0) {\n length = size - offset\n }\n\n if (length < 0) {\n throw errCode(new Error('Length must be greater than or equal to 0'), 'ERR_INVALID_PARAMS')\n }\n\n if (offset + length > size) {\n length = size - offset\n }\n\n return {\n offset,\n length\n }\n}\n\nmodule.exports = validateOffsetAndLength\n","'use strict'\n\nconst { Buffer } = require('buffer')\nconst symbol = Symbol.for('BufferList')\n\nfunction BufferList (buf) {\n if (!(this instanceof BufferList)) {\n return new BufferList(buf)\n 
}\n\n BufferList._init.call(this, buf)\n}\n\nBufferList._init = function _init (buf) {\n Object.defineProperty(this, symbol, { value: true })\n\n this._bufs = []\n this.length = 0\n\n if (buf) {\n this.append(buf)\n }\n}\n\nBufferList.prototype._new = function _new (buf) {\n return new BufferList(buf)\n}\n\nBufferList.prototype._offset = function _offset (offset) {\n if (offset === 0) {\n return [0, 0]\n }\n\n let tot = 0\n\n for (let i = 0; i < this._bufs.length; i++) {\n const _t = tot + this._bufs[i].length\n if (offset < _t || i === this._bufs.length - 1) {\n return [i, offset - tot]\n }\n tot = _t\n }\n}\n\nBufferList.prototype._reverseOffset = function (blOffset) {\n const bufferId = blOffset[0]\n let offset = blOffset[1]\n\n for (let i = 0; i < bufferId; i++) {\n offset += this._bufs[i].length\n }\n\n return offset\n}\n\nBufferList.prototype.get = function get (index) {\n if (index > this.length || index < 0) {\n return undefined\n }\n\n const offset = this._offset(index)\n\n return this._bufs[offset[0]][offset[1]]\n}\n\nBufferList.prototype.slice = function slice (start, end) {\n if (typeof start === 'number' && start < 0) {\n start += this.length\n }\n\n if (typeof end === 'number' && end < 0) {\n end += this.length\n }\n\n return this.copy(null, 0, start, end)\n}\n\nBufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {\n if (typeof srcStart !== 'number' || srcStart < 0) {\n srcStart = 0\n }\n\n if (typeof srcEnd !== 'number' || srcEnd > this.length) {\n srcEnd = this.length\n }\n\n if (srcStart >= this.length) {\n return dst || Buffer.alloc(0)\n }\n\n if (srcEnd <= 0) {\n return dst || Buffer.alloc(0)\n }\n\n const copy = !!dst\n const off = this._offset(srcStart)\n const len = srcEnd - srcStart\n let bytes = len\n let bufoff = (copy && dstStart) || 0\n let start = off[1]\n\n // copy/slice everything\n if (srcStart === 0 && srcEnd === this.length) {\n if (!copy) {\n // slice, but full concat if multiple buffers\n return this._bufs.length === 1\n ? this._bufs[0]\n : Buffer.concat(this._bufs, this.length)\n }\n\n // copy, need to copy individual buffers\n for (let i = 0; i < this._bufs.length; i++) {\n this._bufs[i].copy(dst, bufoff)\n bufoff += this._bufs[i].length\n }\n\n return dst\n }\n\n // easy, cheap case where it's a subset of one of the buffers\n if (bytes <= this._bufs[off[0]].length - start) {\n return copy\n ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)\n : this._bufs[off[0]].slice(start, start + bytes)\n }\n\n if (!copy) {\n // a slice, we need something to copy in to\n dst = Buffer.allocUnsafe(len)\n }\n\n for (let i = off[0]; i < this._bufs.length; i++) {\n const l = this._bufs[i].length - start\n\n if (bytes > l) {\n this._bufs[i].copy(dst, bufoff, start)\n bufoff += l\n } else {\n this._bufs[i].copy(dst, bufoff, start, start + bytes)\n bufoff += l\n break\n }\n\n bytes -= l\n\n if (start) {\n start = 0\n }\n }\n\n // safeguard so that we don't return uninitialized memory\n if (dst.length > bufoff) return dst.slice(0, bufoff)\n\n return dst\n}\n\nBufferList.prototype.shallowSlice = function shallowSlice (start, end) {\n start = start || 0\n end = typeof end !== 'number' ? 
this.length : end\n\n if (start < 0) {\n start += this.length\n }\n\n if (end < 0) {\n end += this.length\n }\n\n if (start === end) {\n return this._new()\n }\n\n const startOffset = this._offset(start)\n const endOffset = this._offset(end)\n const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)\n\n if (endOffset[1] === 0) {\n buffers.pop()\n } else {\n buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1])\n }\n\n if (startOffset[1] !== 0) {\n buffers[0] = buffers[0].slice(startOffset[1])\n }\n\n return this._new(buffers)\n}\n\nBufferList.prototype.toString = function toString (encoding, start, end) {\n return this.slice(start, end).toString(encoding)\n}\n\nBufferList.prototype.consume = function consume (bytes) {\n // first, normalize the argument, in accordance with how Buffer does it\n bytes = Math.trunc(bytes)\n // do nothing if not a positive number\n if (Number.isNaN(bytes) || bytes <= 0) return this\n\n while (this._bufs.length) {\n if (bytes >= this._bufs[0].length) {\n bytes -= this._bufs[0].length\n this.length -= this._bufs[0].length\n this._bufs.shift()\n } else {\n this._bufs[0] = this._bufs[0].slice(bytes)\n this.length -= bytes\n break\n }\n }\n\n return this\n}\n\nBufferList.prototype.duplicate = function duplicate () {\n const copy = this._new()\n\n for (let i = 0; i < this._bufs.length; i++) {\n copy.append(this._bufs[i])\n }\n\n return copy\n}\n\nBufferList.prototype.append = function append (buf) {\n if (buf == null) {\n return this\n }\n\n if (buf.buffer) {\n // append a view of the underlying ArrayBuffer\n this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength))\n } else if (Array.isArray(buf)) {\n for (let i = 0; i < buf.length; i++) {\n this.append(buf[i])\n }\n } else if (this._isBufferList(buf)) {\n // unwrap argument into individual BufferLists\n for (let i = 0; i < buf._bufs.length; i++) {\n this.append(buf._bufs[i])\n }\n } else {\n // coerce number arguments to strings, since Buffer(number) does\n // uninitialized memory allocation\n if (typeof buf === 'number') {\n buf = buf.toString()\n }\n\n this._appendBuffer(Buffer.from(buf))\n }\n\n return this\n}\n\nBufferList.prototype._appendBuffer = function appendBuffer (buf) {\n this._bufs.push(buf)\n this.length += buf.length\n}\n\nBufferList.prototype.indexOf = function (search, offset, encoding) {\n if (encoding === undefined && typeof offset === 'string') {\n encoding = offset\n offset = undefined\n }\n\n if (typeof search === 'function' || Array.isArray(search)) {\n throw new TypeError('The \"value\" argument must be one of type string, Buffer, BufferList, or Uint8Array.')\n } else if (typeof search === 'number') {\n search = Buffer.from([search])\n } else if (typeof search === 'string') {\n search = Buffer.from(search, encoding)\n } else if (this._isBufferList(search)) {\n search = search.slice()\n } else if (Array.isArray(search.buffer)) {\n search = Buffer.from(search.buffer, search.byteOffset, search.byteLength)\n } else if (!Buffer.isBuffer(search)) {\n search = Buffer.from(search)\n }\n\n offset = Number(offset || 0)\n\n if (isNaN(offset)) {\n offset = 0\n }\n\n if (offset < 0) {\n offset = this.length + offset\n }\n\n if (offset < 0) {\n offset = 0\n }\n\n if (search.length === 0) {\n return offset > this.length ? 
this.length : offset\n }\n\n const blOffset = this._offset(offset)\n let blIndex = blOffset[0] // index of which internal buffer we're working on\n let buffOffset = blOffset[1] // offset of the internal buffer we're working on\n\n // scan over each buffer\n for (; blIndex < this._bufs.length; blIndex++) {\n const buff = this._bufs[blIndex]\n\n while (buffOffset < buff.length) {\n const availableWindow = buff.length - buffOffset\n\n if (availableWindow >= search.length) {\n const nativeSearchResult = buff.indexOf(search, buffOffset)\n\n if (nativeSearchResult !== -1) {\n return this._reverseOffset([blIndex, nativeSearchResult])\n }\n\n buffOffset = buff.length - search.length + 1 // end of native search window\n } else {\n const revOffset = this._reverseOffset([blIndex, buffOffset])\n\n if (this._match(revOffset, search)) {\n return revOffset\n }\n\n buffOffset++\n }\n }\n\n buffOffset = 0\n }\n\n return -1\n}\n\nBufferList.prototype._match = function (offset, search) {\n if (this.length - offset < search.length) {\n return false\n }\n\n for (let searchOffset = 0; searchOffset < search.length; searchOffset++) {\n if (this.get(offset + searchOffset) !== search[searchOffset]) {\n return false\n }\n }\n return true\n}\n\n;(function () {\n const methods = {\n readDoubleBE: 8,\n readDoubleLE: 8,\n readFloatBE: 4,\n readFloatLE: 4,\n readInt32BE: 4,\n readInt32LE: 4,\n readUInt32BE: 4,\n readUInt32LE: 4,\n readInt16BE: 2,\n readInt16LE: 2,\n readUInt16BE: 2,\n readUInt16LE: 2,\n readInt8: 1,\n readUInt8: 1,\n readIntBE: null,\n readIntLE: null,\n readUIntBE: null,\n readUIntLE: null\n }\n\n for (const m in methods) {\n (function (m) {\n if (methods[m] === null) {\n BufferList.prototype[m] = function (offset, byteLength) {\n return this.slice(offset, offset + byteLength)[m](0, byteLength)\n }\n } else {\n BufferList.prototype[m] = function (offset = 0) {\n return this.slice(offset, offset + methods[m])[m](0)\n }\n }\n }(m))\n }\n}())\n\n// Used internally by the class and also as an indicator of this object being\n// a `BufferList`. 
It's not possible to use `instanceof BufferList` in a browser\n// environment because there could be multiple different copies of the\n// BufferList class and some `BufferList`s might be `BufferList`s.\nBufferList.prototype._isBufferList = function _isBufferList (b) {\n return b instanceof BufferList || BufferList.isBufferList(b)\n}\n\nBufferList.isBufferList = function isBufferList (b) {\n return b != null && b[symbol]\n}\n\nmodule.exports = BufferList\n","'use strict'\n\nconst {\n Data: PBData\n} = require('./unixfs')\nconst errcode = require('err-code')\n\n/**\n * @typedef {import('./types').Mtime} Mtime\n * @typedef {import('./types').MtimeLike} MtimeLike\n */\n\nconst types = [\n 'raw',\n 'directory',\n 'file',\n 'metadata',\n 'symlink',\n 'hamt-sharded-directory'\n]\n\nconst dirTypes = [\n 'directory',\n 'hamt-sharded-directory'\n]\n\nconst DEFAULT_FILE_MODE = parseInt('0644', 8)\nconst DEFAULT_DIRECTORY_MODE = parseInt('0755', 8)\n\n/**\n * @param {string | number | undefined} [mode]\n */\nfunction parseMode (mode) {\n if (mode == null) {\n return undefined\n }\n\n if (typeof mode === 'number') {\n return mode & 0xFFF\n }\n\n mode = mode.toString()\n\n if (mode.substring(0, 1) === '0') {\n // octal string\n return parseInt(mode, 8) & 0xFFF\n }\n\n // decimal string\n return parseInt(mode, 10) & 0xFFF\n}\n\n/**\n * @param {any} input\n */\nfunction parseMtime (input) {\n if (input == null) {\n return undefined\n }\n\n /** @type {Mtime | undefined} */\n let mtime\n\n // { secs, nsecs }\n if (input.secs != null) {\n mtime = {\n secs: input.secs,\n nsecs: input.nsecs\n }\n }\n\n // UnixFS TimeSpec\n if (input.Seconds != null) {\n mtime = {\n secs: input.Seconds,\n nsecs: input.FractionalNanoseconds\n }\n }\n\n // process.hrtime()\n if (Array.isArray(input)) {\n mtime = {\n secs: input[0],\n nsecs: input[1]\n }\n }\n\n // Javascript Date\n if (input instanceof Date) {\n const ms = input.getTime()\n const secs = Math.floor(ms / 1000)\n\n mtime = {\n secs: secs,\n nsecs: (ms - (secs * 1000)) * 1000\n }\n }\n\n /*\n TODO: https://github.com/ipfs/aegir/issues/487\n\n // process.hrtime.bigint()\n if (input instanceof BigInt) {\n const secs = input / BigInt(1e9)\n const nsecs = input - (secs * BigInt(1e9))\n\n mtime = {\n secs: parseInt(secs.toString()),\n nsecs: parseInt(nsecs.toString())\n }\n }\n */\n\n if (!Object.prototype.hasOwnProperty.call(mtime, 'secs')) {\n return undefined\n }\n\n if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) {\n throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS')\n }\n\n return mtime\n}\n\nclass Data {\n /**\n * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md\n *\n * @param {Uint8Array} marshaled\n */\n static unmarshal (marshaled) {\n const message = PBData.decode(marshaled)\n const decoded = PBData.toObject(message, {\n defaults: false,\n arrays: true,\n longs: Number,\n objects: false\n })\n\n const data = new Data({\n type: types[decoded.Type],\n data: decoded.Data,\n blockSizes: decoded.blocksizes,\n mode: decoded.mode,\n mtime: decoded.mtime\n ? 
{\n secs: decoded.mtime.Seconds,\n nsecs: decoded.mtime.FractionalNanoseconds\n }\n : undefined\n })\n\n // make sure we honour the original mode\n data._originalMode = decoded.mode || 0\n\n return data\n }\n\n /**\n * @param {object} [options]\n * @param {string} [options.type='file']\n * @param {Uint8Array} [options.data]\n * @param {number[]} [options.blockSizes]\n * @param {number} [options.hashType]\n * @param {number} [options.fanout]\n * @param {MtimeLike | null} [options.mtime]\n * @param {number | string} [options.mode]\n */\n constructor (options = {\n type: 'file'\n }) {\n const {\n type,\n data,\n blockSizes,\n hashType,\n fanout,\n mtime,\n mode\n } = options\n\n if (type && !types.includes(type)) {\n throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')\n }\n\n this.type = type || 'file'\n this.data = data\n this.hashType = hashType\n this.fanout = fanout\n\n /** @type {number[]} */\n this.blockSizes = blockSizes || []\n this._originalMode = 0\n this.mode = parseMode(mode)\n\n if (mtime) {\n this.mtime = parseMtime(mtime)\n\n if (this.mtime && !this.mtime.nsecs) {\n this.mtime.nsecs = 0\n }\n }\n }\n\n /**\n * @param {number | undefined} mode\n */\n set mode (mode) {\n this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE\n\n const parsedMode = parseMode(mode)\n\n if (parsedMode !== undefined) {\n this._mode = parsedMode\n }\n }\n\n /**\n * @returns {number | undefined}\n */\n get mode () {\n return this._mode\n }\n\n isDirectory () {\n return Boolean(this.type && dirTypes.includes(this.type))\n }\n\n /**\n * @param {number} size\n */\n addBlockSize (size) {\n this.blockSizes.push(size)\n }\n\n /**\n * @param {number} index\n */\n removeBlockSize (index) {\n this.blockSizes.splice(index, 1)\n }\n\n /**\n * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else\n */\n fileSize () {\n if (this.isDirectory()) {\n // dirs don't have file size\n return 0\n }\n\n let sum = 0\n this.blockSizes.forEach((size) => {\n sum += size\n })\n\n if (this.data) {\n sum += this.data.length\n }\n\n return sum\n }\n\n /**\n * encode to protobuf Uint8Array\n */\n marshal () {\n let type\n\n switch (this.type) {\n case 'raw': type = PBData.DataType.Raw; break\n case 'directory': type = PBData.DataType.Directory; break\n case 'file': type = PBData.DataType.File; break\n case 'metadata': type = PBData.DataType.Metadata; break\n case 'symlink': type = PBData.DataType.Symlink; break\n case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break\n default:\n throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')\n }\n\n let data = this.data\n\n if (!this.data || !this.data.length) {\n data = undefined\n }\n\n let mode\n\n if (this.mode != null) {\n mode = (this._originalMode & 0xFFFFF000) | (parseMode(this.mode) || 0)\n\n if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {\n mode = undefined\n }\n\n if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {\n mode = undefined\n }\n }\n\n let mtime\n\n if (this.mtime != null) {\n const parsed = parseMtime(this.mtime)\n\n if (parsed) {\n mtime = {\n Seconds: parsed.secs,\n FractionalNanoseconds: parsed.nsecs\n }\n\n if (mtime.FractionalNanoseconds === 0) {\n delete mtime.FractionalNanoseconds\n }\n }\n }\n\n const pbData = {\n Type: type,\n Data: data,\n filesize: this.isDirectory() ? 
undefined : this.fileSize(),\n blocksizes: this.blockSizes,\n hashType: this.hashType,\n fanout: this.fanout,\n mode,\n mtime\n }\n\n return PBData.encode(pbData).finish()\n }\n}\n\nmodule.exports = {\n UnixFS: Data,\n parseMode,\n parseMtime\n}\n","/*eslint-disable*/\n\"use strict\";\n\nvar $protobuf = require(\"protobufjs/minimal\");\n\n// Common aliases\nvar $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;\n\n// Exported root namespace\nvar $root = $protobuf.roots[\"ipfs-unixfs\"] || ($protobuf.roots[\"ipfs-unixfs\"] = {});\n\n$root.Data = (function() {\n\n /**\n * Properties of a Data.\n * @exports IData\n * @interface IData\n * @property {Data.DataType} Type Data Type\n * @property {Uint8Array|null} [Data] Data Data\n * @property {number|null} [filesize] Data filesize\n * @property {Array.|null} [blocksizes] Data blocksizes\n * @property {number|null} [hashType] Data hashType\n * @property {number|null} [fanout] Data fanout\n * @property {number|null} [mode] Data mode\n * @property {IUnixTime|null} [mtime] Data mtime\n */\n\n /**\n * Constructs a new Data.\n * @exports Data\n * @classdesc Represents a Data.\n * @implements IData\n * @constructor\n * @param {IData=} [p] Properties to set\n */\n function Data(p) {\n this.blocksizes = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * Data Type.\n * @member {Data.DataType} Type\n * @memberof Data\n * @instance\n */\n Data.prototype.Type = 0;\n\n /**\n * Data Data.\n * @member {Uint8Array} Data\n * @memberof Data\n * @instance\n */\n Data.prototype.Data = $util.newBuffer([]);\n\n /**\n * Data filesize.\n * @member {number} filesize\n * @memberof Data\n * @instance\n */\n Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data blocksizes.\n * @member {Array.} blocksizes\n * @memberof Data\n * @instance\n */\n Data.prototype.blocksizes = $util.emptyArray;\n\n /**\n * Data hashType.\n * @member {number} hashType\n * @memberof Data\n * @instance\n */\n Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data fanout.\n * @member {number} fanout\n * @memberof Data\n * @instance\n */\n Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data mode.\n * @member {number} mode\n * @memberof Data\n * @instance\n */\n Data.prototype.mode = 0;\n\n /**\n * Data mtime.\n * @member {IUnixTime|null|undefined} mtime\n * @memberof Data\n * @instance\n */\n Data.prototype.mtime = null;\n\n /**\n * Encodes the specified Data message. 
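// ---------------------------------------------------------------------------
// Editor's sketch (illustrative, not part of the vendored bundle): a
// round-trip through the UnixFS `Data` class exported just above. `marshal()`
// protobuf-encodes the node; `unmarshal()` decodes it, falling back to mode
// 0644 for files / 0755 for directories when no mode was stored.
const { UnixFS: DemoUnixFS } = require('ipfs-unixfs')
const demoFile = new DemoUnixFS({ type: 'file', mode: '0640', mtime: new Date() })
demoFile.addBlockSize(262144) // record one 256 KiB leaf block
const demoCopy = DemoUnixFS.unmarshal(demoFile.marshal())
console.assert(demoCopy.fileSize() === 262144) // data.length + sum(blockSizes)
// ---------------------------------------------------------------------------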
Does not implicitly {@link Data.verify|verify} messages.\n * @function encode\n * @memberof Data\n * @static\n * @param {IData} m Data message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Data.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n w.uint32(8).int32(m.Type);\n if (m.Data != null && Object.hasOwnProperty.call(m, \"Data\"))\n w.uint32(18).bytes(m.Data);\n if (m.filesize != null && Object.hasOwnProperty.call(m, \"filesize\"))\n w.uint32(24).uint64(m.filesize);\n if (m.blocksizes != null && m.blocksizes.length) {\n for (var i = 0; i < m.blocksizes.length; ++i)\n w.uint32(32).uint64(m.blocksizes[i]);\n }\n if (m.hashType != null && Object.hasOwnProperty.call(m, \"hashType\"))\n w.uint32(40).uint64(m.hashType);\n if (m.fanout != null && Object.hasOwnProperty.call(m, \"fanout\"))\n w.uint32(48).uint64(m.fanout);\n if (m.mode != null && Object.hasOwnProperty.call(m, \"mode\"))\n w.uint32(56).uint32(m.mode);\n if (m.mtime != null && Object.hasOwnProperty.call(m, \"mtime\"))\n $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim();\n return w;\n };\n\n /**\n * Decodes a Data message from the specified reader or buffer.\n * @function decode\n * @memberof Data\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {Data} Data\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Data.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.Data();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.Type = r.int32();\n break;\n case 2:\n m.Data = r.bytes();\n break;\n case 3:\n m.filesize = r.uint64();\n break;\n case 4:\n if (!(m.blocksizes && m.blocksizes.length))\n m.blocksizes = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.blocksizes.push(r.uint64());\n } else\n m.blocksizes.push(r.uint64());\n break;\n case 5:\n m.hashType = r.uint64();\n break;\n case 6:\n m.fanout = r.uint64();\n break;\n case 7:\n m.mode = r.uint32();\n break;\n case 8:\n m.mtime = $root.UnixTime.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n if (!m.hasOwnProperty(\"Type\"))\n throw $util.ProtocolError(\"missing required 'Type'\", { instance: m });\n return m;\n };\n\n /**\n * Creates a Data message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof Data\n * @static\n * @param {Object.} d Plain object\n * @returns {Data} Data\n */\n Data.fromObject = function fromObject(d) {\n if (d instanceof $root.Data)\n return d;\n var m = new $root.Data();\n switch (d.Type) {\n case \"Raw\":\n case 0:\n m.Type = 0;\n break;\n case \"Directory\":\n case 1:\n m.Type = 1;\n break;\n case \"File\":\n case 2:\n m.Type = 2;\n break;\n case \"Metadata\":\n case 3:\n m.Type = 3;\n break;\n case \"Symlink\":\n case 4:\n m.Type = 4;\n break;\n case \"HAMTShard\":\n case 5:\n m.Type = 5;\n break;\n }\n if (d.Data != null) {\n if (typeof d.Data === \"string\")\n $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0);\n else if (d.Data.length)\n m.Data = d.Data;\n }\n if (d.filesize != null) {\n if ($util.Long)\n (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true;\n else if (typeof d.filesize === \"string\")\n m.filesize = parseInt(d.filesize, 10);\n else if (typeof d.filesize === \"number\")\n m.filesize = d.filesize;\n else if (typeof d.filesize === \"object\")\n m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true);\n }\n if (d.blocksizes) {\n if (!Array.isArray(d.blocksizes))\n throw TypeError(\".Data.blocksizes: array expected\");\n m.blocksizes = [];\n for (var i = 0; i < d.blocksizes.length; ++i) {\n if ($util.Long)\n (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true;\n else if (typeof d.blocksizes[i] === \"string\")\n m.blocksizes[i] = parseInt(d.blocksizes[i], 10);\n else if (typeof d.blocksizes[i] === \"number\")\n m.blocksizes[i] = d.blocksizes[i];\n else if (typeof d.blocksizes[i] === \"object\")\n m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true);\n }\n }\n if (d.hashType != null) {\n if ($util.Long)\n (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true;\n else if (typeof d.hashType === \"string\")\n m.hashType = parseInt(d.hashType, 10);\n else if (typeof d.hashType === \"number\")\n m.hashType = d.hashType;\n else if (typeof d.hashType === \"object\")\n m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true);\n }\n if (d.fanout != null) {\n if ($util.Long)\n (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true;\n else if (typeof d.fanout === \"string\")\n m.fanout = parseInt(d.fanout, 10);\n else if (typeof d.fanout === \"number\")\n m.fanout = d.fanout;\n else if (typeof d.fanout === \"object\")\n m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true);\n }\n if (d.mode != null) {\n m.mode = d.mode >>> 0;\n }\n if (d.mtime != null) {\n if (typeof d.mtime !== \"object\")\n throw TypeError(\".Data.mtime: object expected\");\n m.mtime = $root.UnixTime.fromObject(d.mtime);\n }\n return m;\n };\n\n /**\n * Creates a plain object from a Data message. Also converts values to other types if specified.\n * @function toObject\n * @memberof Data\n * @static\n * @param {Data} m Data\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n Data.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.arrays || o.defaults) {\n d.blocksizes = [];\n }\n if (o.defaults) {\n d.Type = o.enums === String ? 
\"Raw\" : 0;\n if (o.bytes === String)\n d.Data = \"\";\n else {\n d.Data = [];\n if (o.bytes !== Array)\n d.Data = $util.newBuffer(d.Data);\n }\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.filesize = o.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.hashType = o.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.fanout = o.longs === String ? \"0\" : 0;\n d.mode = 0;\n d.mtime = null;\n }\n if (m.Type != null && m.hasOwnProperty(\"Type\")) {\n d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type;\n }\n if (m.Data != null && m.hasOwnProperty(\"Data\")) {\n d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data;\n }\n if (m.filesize != null && m.hasOwnProperty(\"filesize\")) {\n if (typeof m.filesize === \"number\")\n d.filesize = o.longs === String ? String(m.filesize) : m.filesize;\n else\n d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize;\n }\n if (m.blocksizes && m.blocksizes.length) {\n d.blocksizes = [];\n for (var j = 0; j < m.blocksizes.length; ++j) {\n if (typeof m.blocksizes[j] === \"number\")\n d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j];\n else\n d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j];\n }\n }\n if (m.hashType != null && m.hasOwnProperty(\"hashType\")) {\n if (typeof m.hashType === \"number\")\n d.hashType = o.longs === String ? String(m.hashType) : m.hashType;\n else\n d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType;\n }\n if (m.fanout != null && m.hasOwnProperty(\"fanout\")) {\n if (typeof m.fanout === \"number\")\n d.fanout = o.longs === String ? String(m.fanout) : m.fanout;\n else\n d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? 
new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout;\n }\n if (m.mode != null && m.hasOwnProperty(\"mode\")) {\n d.mode = m.mode;\n }\n if (m.mtime != null && m.hasOwnProperty(\"mtime\")) {\n d.mtime = $root.UnixTime.toObject(m.mtime, o);\n }\n return d;\n };\n\n /**\n * Converts this Data to JSON.\n * @function toJSON\n * @memberof Data\n * @instance\n * @returns {Object.} JSON object\n */\n Data.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * DataType enum.\n * @name Data.DataType\n * @enum {number}\n * @property {number} Raw=0 Raw value\n * @property {number} Directory=1 Directory value\n * @property {number} File=2 File value\n * @property {number} Metadata=3 Metadata value\n * @property {number} Symlink=4 Symlink value\n * @property {number} HAMTShard=5 HAMTShard value\n */\n Data.DataType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"Raw\"] = 0;\n values[valuesById[1] = \"Directory\"] = 1;\n values[valuesById[2] = \"File\"] = 2;\n values[valuesById[3] = \"Metadata\"] = 3;\n values[valuesById[4] = \"Symlink\"] = 4;\n values[valuesById[5] = \"HAMTShard\"] = 5;\n return values;\n })();\n\n return Data;\n})();\n\n$root.UnixTime = (function() {\n\n /**\n * Properties of an UnixTime.\n * @exports IUnixTime\n * @interface IUnixTime\n * @property {number} Seconds UnixTime Seconds\n * @property {number|null} [FractionalNanoseconds] UnixTime FractionalNanoseconds\n */\n\n /**\n * Constructs a new UnixTime.\n * @exports UnixTime\n * @classdesc Represents an UnixTime.\n * @implements IUnixTime\n * @constructor\n * @param {IUnixTime=} [p] Properties to set\n */\n function UnixTime(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * UnixTime Seconds.\n * @member {number} Seconds\n * @memberof UnixTime\n * @instance\n */\n UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * UnixTime FractionalNanoseconds.\n * @member {number} FractionalNanoseconds\n * @memberof UnixTime\n * @instance\n */\n UnixTime.prototype.FractionalNanoseconds = 0;\n\n /**\n * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages.\n * @function encode\n * @memberof UnixTime\n * @static\n * @param {IUnixTime} m UnixTime message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n UnixTime.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n w.uint32(8).int64(m.Seconds);\n if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, \"FractionalNanoseconds\"))\n w.uint32(21).fixed32(m.FractionalNanoseconds);\n return w;\n };\n\n /**\n * Decodes an UnixTime message from the specified reader or buffer.\n * @function decode\n * @memberof UnixTime\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {UnixTime} UnixTime\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n UnixTime.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.UnixTime();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.Seconds = r.int64();\n break;\n case 2:\n m.FractionalNanoseconds = r.fixed32();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n if (!m.hasOwnProperty(\"Seconds\"))\n throw $util.ProtocolError(\"missing required 'Seconds'\", { instance: m });\n return m;\n };\n\n /**\n * Creates an UnixTime message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof UnixTime\n * @static\n * @param {Object.} d Plain object\n * @returns {UnixTime} UnixTime\n */\n UnixTime.fromObject = function fromObject(d) {\n if (d instanceof $root.UnixTime)\n return d;\n var m = new $root.UnixTime();\n if (d.Seconds != null) {\n if ($util.Long)\n (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false;\n else if (typeof d.Seconds === \"string\")\n m.Seconds = parseInt(d.Seconds, 10);\n else if (typeof d.Seconds === \"number\")\n m.Seconds = d.Seconds;\n else if (typeof d.Seconds === \"object\")\n m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber();\n }\n if (d.FractionalNanoseconds != null) {\n m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0;\n }\n return m;\n };\n\n /**\n * Creates a plain object from an UnixTime message. Also converts values to other types if specified.\n * @function toObject\n * @memberof UnixTime\n * @static\n * @param {UnixTime} m UnixTime\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n UnixTime.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.defaults) {\n if ($util.Long) {\n var n = new $util.Long(0, 0, false);\n d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.Seconds = o.longs === String ? \"0\" : 0;\n d.FractionalNanoseconds = 0;\n }\n if (m.Seconds != null && m.hasOwnProperty(\"Seconds\")) {\n if (typeof m.Seconds === \"number\")\n d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds;\n else\n d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds;\n }\n if (m.FractionalNanoseconds != null && m.hasOwnProperty(\"FractionalNanoseconds\")) {\n d.FractionalNanoseconds = m.FractionalNanoseconds;\n }\n return d;\n };\n\n /**\n * Converts this UnixTime to JSON.\n * @function toJSON\n * @memberof UnixTime\n * @instance\n * @returns {Object.} JSON object\n */\n UnixTime.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n return UnixTime;\n})();\n\n$root.Metadata = (function() {\n\n /**\n * Properties of a Metadata.\n * @exports IMetadata\n * @interface IMetadata\n * @property {string|null} [MimeType] Metadata MimeType\n */\n\n /**\n * Constructs a new Metadata.\n * @exports Metadata\n * @classdesc Represents a Metadata.\n * @implements IMetadata\n * @constructor\n * @param {IMetadata=} [p] Properties to set\n */\n function Metadata(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * Metadata MimeType.\n * @member {string} MimeType\n * @memberof Metadata\n * @instance\n */\n Metadata.prototype.MimeType = \"\";\n\n /**\n * Encodes the specified Metadata message. 
Does not implicitly {@link Metadata.verify|verify} messages.\n * @function encode\n * @memberof Metadata\n * @static\n * @param {IMetadata} m Metadata message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Metadata.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n if (m.MimeType != null && Object.hasOwnProperty.call(m, \"MimeType\"))\n w.uint32(10).string(m.MimeType);\n return w;\n };\n\n /**\n * Decodes a Metadata message from the specified reader or buffer.\n * @function decode\n * @memberof Metadata\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {Metadata} Metadata\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Metadata.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.MimeType = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n /**\n * Creates a Metadata message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof Metadata\n * @static\n * @param {Object.} d Plain object\n * @returns {Metadata} Metadata\n */\n Metadata.fromObject = function fromObject(d) {\n if (d instanceof $root.Metadata)\n return d;\n var m = new $root.Metadata();\n if (d.MimeType != null) {\n m.MimeType = String(d.MimeType);\n }\n return m;\n };\n\n /**\n * Creates a plain object from a Metadata message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof Metadata\n * @static\n * @param {Metadata} m Metadata\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n Metadata.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.defaults) {\n d.MimeType = \"\";\n }\n if (m.MimeType != null && m.hasOwnProperty(\"MimeType\")) {\n d.MimeType = m.MimeType;\n }\n return d;\n };\n\n /**\n * Converts this Metadata to JSON.\n * @function toJSON\n * @memberof Metadata\n * @instance\n * @returns {Object.} JSON object\n */\n Metadata.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n return Metadata;\n})();\n\nmodule.exports = $root;\n","'use strict'\n\n// @ts-ignore\nconst BufferList = require('bl/BufferList')\n\n/**\n * @type {import('../types').Chunker}\n */\nmodule.exports = async function * fixedSizeChunker (source, options) {\n let bl = new BufferList()\n let currentLength = 0\n let emitted = false\n const maxChunkSize = options.maxChunkSize\n\n for await (const buffer of source) {\n bl.append(buffer)\n\n currentLength += buffer.length\n\n while (currentLength >= maxChunkSize) {\n yield bl.slice(0, maxChunkSize)\n emitted = true\n\n // throw away consumed bytes\n if (maxChunkSize === bl.length) {\n bl = new BufferList()\n currentLength = 0\n } else {\n const newBl = new BufferList()\n newBl.append(bl.shallowSlice(maxChunkSize))\n bl = newBl\n\n // update our offset\n currentLength -= maxChunkSize\n }\n }\n }\n\n if (!emitted || currentLength) {\n // return any remaining bytes or an empty buffer\n yield bl.slice(0, currentLength)\n }\n}\n","'use strict'\n\n// @ts-ignore\nconst BufferList = require('bl/BufferList')\n// @ts-ignore\nconst { create } = require('rabin-wasm')\nconst errcode = require('err-code')\n\n/**\n * @typedef {object} RabinOptions\n * @property {number} min\n * @property {number} max\n * @property {number} bits\n * @property {number} window\n * @property {number} polynomial\n */\n\n/**\n * @type {import('../types').Chunker}\n */\nmodule.exports = async function * rabinChunker (source, options) {\n let min, max, avg\n\n if (options.minChunkSize && options.maxChunkSize && options.avgChunkSize) {\n avg = options.avgChunkSize\n min = options.minChunkSize\n max = options.maxChunkSize\n } else if (!options.avgChunkSize) {\n throw errcode(new Error('please specify an average chunk size'), 'ERR_INVALID_AVG_CHUNK_SIZE')\n } else {\n avg = options.avgChunkSize\n min = avg / 3\n max = avg + (avg / 2)\n }\n\n // validate min/max/avg in the same way as go\n if (min < 16) {\n throw errcode(new Error('rabin min must be greater than 16'), 'ERR_INVALID_MIN_CHUNK_SIZE')\n }\n\n if (max < min) {\n max = min\n }\n\n if (avg < min) {\n avg = min\n }\n\n const sizepow = Math.floor(Math.log2(avg))\n\n for await (const chunk of rabin(source, {\n min: min,\n max: max,\n bits: sizepow,\n window: options.window,\n polynomial: options.polynomial\n })) {\n yield chunk\n }\n}\n\n/**\n * @param {AsyncIterable} source\n * @param {RabinOptions} options\n */\nasync function * rabin (source, options) {\n const r = await create(options.bits, options.min, options.max, options.window)\n const buffers = new BufferList()\n\n for await (const chunk of source) {\n buffers.append(chunk)\n\n const sizes = r.fingerprint(chunk)\n\n for (let i = 0; i < sizes.length; i++) {\n const size = sizes[i]\n const buf = buffers.slice(0, size)\n 
buffers.consume(size)\n\n yield buf\n }\n }\n\n if (buffers.length) {\n yield buffers.slice(0)\n }\n}\n","'use strict'\n\nconst { UnixFS } = require('ipfs-unixfs')\nconst persist = require('../utils/persist')\nconst { encode, prepare } = require('@ipld/dag-pb')\n\n/**\n * @typedef {import('../types').Directory} Directory\n */\n\n/**\n * @type {import('../types').UnixFSV1DagBuilder}\n */\nconst dirBuilder = async (item, blockstore, options) => {\n const unixfs = new UnixFS({\n type: 'directory',\n mtime: item.mtime,\n mode: item.mode\n })\n\n const buffer = encode(prepare({ Data: unixfs.marshal() }))\n const cid = await persist(buffer, blockstore, options)\n const path = item.path\n\n return {\n cid,\n path,\n unixfs,\n size: buffer.length\n }\n}\n\nmodule.exports = dirBuilder\n","'use strict'\n\nconst batch = require('it-batch')\n\n/**\n * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder\n */\n\n/**\n * @type {FileDAGBuilder}\n */\nfunction balanced (source, reduce, options) {\n return reduceToParents(source, reduce, options)\n}\n\n/**\n * @type {FileDAGBuilder}\n */\nasync function reduceToParents (source, reduce, options) {\n const roots = []\n\n for await (const chunked of batch(source, options.maxChildrenPerNode)) {\n roots.push(await reduce(chunked))\n }\n\n if (roots.length > 1) {\n return reduceToParents(roots, reduce, options)\n }\n\n return roots[0]\n}\n\nmodule.exports = balanced\n","'use strict'\n\nconst { UnixFS } = require('ipfs-unixfs')\nconst persist = require('../../utils/persist')\nconst dagPb = require('@ipld/dag-pb')\nconst raw = require('multiformats/codecs/raw')\n\n/**\n * @typedef {import('../../types').BufferImporter} BufferImporter\n */\n\n/**\n * @type {BufferImporter}\n */\nasync function * bufferImporter (file, block, options) {\n for await (let buffer of file.content) {\n yield async () => {\n options.progress(buffer.length, file.path)\n let unixfs\n\n /** @type {import('../../types').PersistOptions} */\n const opts = {\n codec: dagPb,\n cidVersion: options.cidVersion,\n hasher: options.hasher,\n onlyHash: options.onlyHash\n }\n\n if (options.rawLeaves) {\n opts.codec = raw\n opts.cidVersion = 1\n } else {\n unixfs = new UnixFS({\n type: options.leafType,\n data: buffer,\n mtime: file.mtime,\n mode: file.mode\n })\n\n buffer = dagPb.encode({\n Data: unixfs.marshal(),\n Links: []\n })\n }\n\n return {\n cid: await persist(buffer, block, opts),\n unixfs,\n size: buffer.length\n }\n }\n }\n}\n\nmodule.exports = bufferImporter\n","'use strict'\n\nconst all = require('it-all')\n\n/**\n * @type {import('../../types').FileDAGBuilder}\n */\nmodule.exports = async function (source, reduce) {\n return reduce(await all(source))\n}\n","'use strict'\n\nconst errCode = require('err-code')\nconst { UnixFS } = require('ipfs-unixfs')\nconst persist = require('../../utils/persist')\nconst { encode, prepare } = require('@ipld/dag-pb')\nconst parallelBatch = require('it-parallel-batch')\nconst rawCodec = require('multiformats/codecs/raw')\nconst dagPb = require('@ipld/dag-pb')\n\n/**\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('../../types').File} File\n * @typedef {import('../../types').ImporterOptions} ImporterOptions\n * @typedef {import('../../types').Reducer} Reducer\n * @typedef {import('../../types').DAGBuilder} DAGBuilder\n * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder\n */\n\n/**\n * @type {{ [key: string]: FileDAGBuilder}}\n */\nconst dagBuilders = {\n flat: require('./flat'),\n balanced: 
require('./balanced'),\n trickle: require('./trickle')\n}\n\n/**\n * @param {File} file\n * @param {Blockstore} blockstore\n * @param {ImporterOptions} options\n */\nasync function * buildFileBatch (file, blockstore, options) {\n let count = -1\n let previous\n let bufferImporter\n\n if (typeof options.bufferImporter === 'function') {\n bufferImporter = options.bufferImporter\n } else {\n bufferImporter = require('./buffer-importer')\n }\n\n for await (const entry of parallelBatch(bufferImporter(file, blockstore, options), options.blockWriteConcurrency)) {\n count++\n\n if (count === 0) {\n previous = entry\n continue\n } else if (count === 1 && previous) {\n yield previous\n previous = null\n }\n\n yield entry\n }\n\n if (previous) {\n previous.single = true\n yield previous\n }\n}\n\n/**\n * @param {File} file\n * @param {Blockstore} blockstore\n * @param {ImporterOptions} options\n */\nconst reduce = (file, blockstore, options) => {\n /**\n * @type {Reducer}\n */\n async function reducer (leaves) {\n if (leaves.length === 1 && leaves[0].single && options.reduceSingleLeafToSelf) {\n const leaf = leaves[0]\n\n if (leaf.cid.code === rawCodec.code && (file.mtime !== undefined || file.mode !== undefined)) {\n // only one leaf node which is a buffer - we have metadata so convert it into a\n // UnixFS entry otherwise we'll have nowhere to store the metadata\n let buffer = await blockstore.get(leaf.cid)\n\n leaf.unixfs = new UnixFS({\n type: 'file',\n mtime: file.mtime,\n mode: file.mode,\n data: buffer\n })\n\n buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))\n\n // // TODO vmx 2021-03-26: This is what the original code does, it checks\n // // the multihash of the original leaf node and uses then the same\n // // hasher. i wonder if that's really needed or if we could just use\n // // the hasher from `options.hasher` instead.\n // const multihash = mh.decode(leaf.cid.multihash.bytes)\n // let hasher\n // switch multihash {\n // case sha256.code {\n // hasher = sha256\n // break;\n // }\n // //case identity.code {\n // // hasher = identity\n // // break;\n // //}\n // default: {\n // throw new Error(`Unsupported hasher \"${multihash}\"`)\n // }\n // }\n leaf.cid = await persist(buffer, blockstore, {\n ...options,\n codec: dagPb,\n hasher: options.hasher,\n cidVersion: options.cidVersion\n })\n leaf.size = buffer.length\n }\n\n return {\n cid: leaf.cid,\n path: file.path,\n unixfs: leaf.unixfs,\n size: leaf.size\n }\n }\n\n // create a parent node and add all the leaves\n const f = new UnixFS({\n type: 'file',\n mtime: file.mtime,\n mode: file.mode\n })\n\n const links = leaves\n .filter(leaf => {\n if (leaf.cid.code === rawCodec.code && leaf.size) {\n return true\n }\n\n if (leaf.unixfs && !leaf.unixfs.data && leaf.unixfs.fileSize()) {\n return true\n }\n\n return Boolean(leaf.unixfs && leaf.unixfs.data && leaf.unixfs.data.length)\n })\n .map((leaf) => {\n if (leaf.cid.code === rawCodec.code) {\n // node is a leaf buffer\n f.addBlockSize(leaf.size)\n\n return {\n Name: '',\n Tsize: leaf.size,\n Hash: leaf.cid\n }\n }\n\n if (!leaf.unixfs || !leaf.unixfs.data) {\n // node is an intermediate node\n f.addBlockSize((leaf.unixfs && leaf.unixfs.fileSize()) || 0)\n } else {\n // node is a unixfs 'file' leaf node\n f.addBlockSize(leaf.unixfs.data.length)\n }\n\n return {\n Name: '',\n Tsize: leaf.size,\n Hash: leaf.cid\n }\n })\n\n const node = {\n Data: f.marshal(),\n Links: links\n }\n const buffer = encode(prepare(node))\n const cid = await persist(buffer, blockstore, options)\n\n return 
{\n cid,\n path: file.path,\n unixfs: f,\n size: buffer.length + node.Links.reduce((acc, curr) => acc + curr.Tsize, 0)\n }\n }\n\n return reducer\n}\n\n/**\n * @type {import('../../types').UnixFSV1DagBuilder}\n */\nfunction fileBuilder (file, block, options) {\n const dagBuilder = dagBuilders[options.strategy]\n\n if (!dagBuilder) {\n throw errCode(new Error(`Unknown importer build strategy name: ${options.strategy}`), 'ERR_BAD_STRATEGY')\n }\n\n return dagBuilder(buildFileBatch(file, block, options), reduce(file, block, options), options)\n}\n\nmodule.exports = fileBuilder\n","'use strict'\n\nconst batch = require('it-batch')\n\n/**\n * @typedef {import('ipfs-unixfs').UnixFS} UnixFS\n * @typedef {import('../../types').ImporterOptions} ImporterOptions\n * @typedef {import('../../types').InProgressImportResult} InProgressImportResult\n * @typedef {import('../../types').TrickleDagNode} TrickleDagNode\n * @typedef {import('../../types').Reducer} Reducer\n * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder\n */\n\n/**\n * @type {FileDAGBuilder}\n */\nmodule.exports = async function trickleStream (source, reduce, options) {\n const root = new Root(options.layerRepeat)\n let iteration = 0\n let maxDepth = 1\n\n /** @type {SubTree} */\n let subTree = root\n\n for await (const layer of batch(source, options.maxChildrenPerNode)) {\n if (subTree.isFull()) {\n if (subTree !== root) {\n root.addChild(await subTree.reduce(reduce))\n }\n\n if (iteration && iteration % options.layerRepeat === 0) {\n maxDepth++\n }\n\n subTree = new SubTree(maxDepth, options.layerRepeat, iteration)\n\n iteration++\n }\n\n subTree.append(layer)\n }\n\n if (subTree && subTree !== root) {\n root.addChild(await subTree.reduce(reduce))\n }\n\n return root.reduce(reduce)\n}\n\nclass SubTree {\n /**\n * @param {number} maxDepth\n * @param {number} layerRepeat\n * @param {number} [iteration=0]\n */\n constructor (maxDepth, layerRepeat, iteration = 0) {\n this.maxDepth = maxDepth\n this.layerRepeat = layerRepeat\n this.currentDepth = 1\n this.iteration = iteration\n\n /** @type {TrickleDagNode} */\n this.root = this.node = this.parent = {\n children: [],\n depth: this.currentDepth,\n maxDepth,\n maxChildren: (this.maxDepth - this.currentDepth) * this.layerRepeat\n }\n }\n\n isFull () {\n if (!this.root.data) {\n return false\n }\n\n if (this.currentDepth < this.maxDepth && this.node.maxChildren) {\n // can descend\n this._addNextNodeToParent(this.node)\n\n return false\n }\n\n // try to find new node from node.parent\n const distantRelative = this._findParent(this.node, this.currentDepth)\n\n if (distantRelative) {\n this._addNextNodeToParent(distantRelative)\n\n return false\n }\n\n return true\n }\n\n /**\n * @param {TrickleDagNode} parent\n */\n _addNextNodeToParent (parent) {\n this.parent = parent\n\n // find site for new node\n const nextNode = {\n children: [],\n depth: parent.depth + 1,\n parent,\n maxDepth: this.maxDepth,\n maxChildren: Math.floor(parent.children.length / this.layerRepeat) * this.layerRepeat\n }\n\n // @ts-ignore\n parent.children.push(nextNode)\n\n this.currentDepth = nextNode.depth\n this.node = nextNode\n }\n\n /**\n *\n * @param {InProgressImportResult[]} layer\n */\n append (layer) {\n this.node.data = layer\n }\n\n /**\n * @param {Reducer} reduce\n */\n reduce (reduce) {\n return this._reduce(this.root, reduce)\n }\n\n /**\n * @param {TrickleDagNode} node\n * @param {Reducer} reduce\n * @returns {Promise}\n */\n async _reduce (node, reduce) {\n /** @type {InProgressImportResult[]} */\n 
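// ---------------------------------------------------------------------------
// Editor's note (illustrative, not part of the vendored bundle): the balanced
// builder earlier reduces leaves bottom-up in batches of maxChildrenPerNode
// (174 by default), repeating until a single root remains. A standalone
// sketch of that reduction shape, using plain objects instead of import
// results:
function demoBalancedShape (leafCount, batchSize) {
  let layer = new Array(leafCount).fill({ size: 1 })
  while (layer.length > 1) {
    const parents = []
    for (let i = 0; i < layer.length; i += batchSize) {
      parents.push({ children: layer.slice(i, i + batchSize) }) // one parent per batch
    }
    layer = parents
  }
  return layer[0] // demoBalancedShape(200, 174) -> root with 2 children
}
// ---------------------------------------------------------------------------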
let children = []\n\n if (node.children.length) {\n children = await Promise.all(\n node.children\n // @ts-ignore\n .filter(child => child.data)\n // @ts-ignore\n .map(child => this._reduce(child, reduce))\n )\n }\n\n return reduce((node.data || []).concat(children))\n }\n\n /**\n * @param {TrickleDagNode} node\n * @param {number} depth\n * @returns {TrickleDagNode | undefined}\n */\n _findParent (node, depth) {\n const parent = node.parent\n\n if (!parent || parent.depth === 0) {\n return\n }\n\n if (parent.children.length === parent.maxChildren || !parent.maxChildren) {\n // this layer is full, may be able to traverse to a different branch\n return this._findParent(parent, depth)\n }\n\n return parent\n }\n}\n\nclass Root extends SubTree {\n /**\n * @param {number} layerRepeat\n */\n constructor (layerRepeat) {\n super(0, layerRepeat)\n\n this.root.depth = 0\n this.currentDepth = 1\n }\n\n /**\n * @param {InProgressImportResult} child\n */\n addChild (child) {\n this.root.children.push(child)\n }\n\n /**\n * @param {Reducer} reduce\n */\n reduce (reduce) {\n return reduce((this.root.data || []).concat(this.root.children))\n }\n}\n","'use strict'\n\nconst dirBuilder = require('./dir')\nconst fileBuilder = require('./file')\nconst errCode = require('err-code')\n\n/**\n * @typedef {import('../types').File} File\n * @typedef {import('../types').Directory} Directory\n * @typedef {import('../types').DAGBuilder} DAGBuilder\n * @typedef {import('../types').Chunker} Chunker\n * @typedef {import('../types').ChunkValidator} ChunkValidator\n */\n\n/**\n * @param {any} thing\n * @returns {thing is Iterable}\n */\nfunction isIterable (thing) {\n return Symbol.iterator in thing\n}\n\n/**\n * @param {any} thing\n * @returns {thing is AsyncIterable}\n */\nfunction isAsyncIterable (thing) {\n return Symbol.asyncIterator in thing\n}\n\n/**\n * @param {Uint8Array | AsyncIterable | Iterable} content\n * @returns {AsyncIterable}\n */\nfunction contentAsAsyncIterable (content) {\n try {\n if (content instanceof Uint8Array) {\n return (async function * () {\n yield content\n }())\n } else if (isIterable(content)) {\n return (async function * () {\n yield * content\n }())\n } else if (isAsyncIterable(content)) {\n return content\n }\n } catch {\n throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')\n }\n\n throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')\n}\n\n/**\n * @type {DAGBuilder}\n */\nasync function * dagBuilder (source, blockstore, options) {\n for await (const entry of source) {\n if (entry.path) {\n if (entry.path.substring(0, 2) === './') {\n options.wrapWithDirectory = true\n }\n\n entry.path = entry.path\n .split('/')\n .filter(path => path && path !== '.')\n .join('/')\n }\n\n if (entry.content) {\n /**\n * @type {Chunker}\n */\n let chunker\n\n if (typeof options.chunker === 'function') {\n chunker = options.chunker\n } else if (options.chunker === 'rabin') {\n chunker = require('../chunker/rabin')\n } else {\n chunker = require('../chunker/fixed-size')\n }\n\n /**\n * @type {ChunkValidator}\n */\n let chunkValidator\n\n if (typeof options.chunkValidator === 'function') {\n chunkValidator = options.chunkValidator\n } else {\n chunkValidator = require('./validate-chunks')\n }\n\n /** @type {File} */\n const file = {\n path: entry.path,\n mtime: entry.mtime,\n mode: entry.mode,\n content: chunker(chunkValidator(contentAsAsyncIterable(entry.content), options), options)\n }\n\n yield () => fileBuilder(file, blockstore, options)\n } else if (entry.path) {\n /** 
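// ---------------------------------------------------------------------------
// Editor's sketch (illustrative, not part of the vendored bundle): the DAG
// builder above normalises entry content so the chunker always receives an
// AsyncIterable of Uint8Arrays. A standalone equivalent of that branching
// (assumes `content` is a non-null object or Uint8Array, as upstream does):
async function * demoAsAsyncIterable (content) {
  if (content instanceof Uint8Array) {
    yield content // a single buffer becomes a one-item stream
  } else if (Symbol.iterator in content || Symbol.asyncIterator in content) {
    yield * content // sync and async iterables pass straight through
  } else {
    throw new Error('Content was invalid') // mirrors ERR_INVALID_CONTENT
  }
}
// ---------------------------------------------------------------------------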
@type {Directory} */\n const dir = {\n path: entry.path,\n mtime: entry.mtime,\n mode: entry.mode\n }\n\n yield () => dirBuilder(dir, blockstore, options)\n } else {\n throw new Error('Import candidate must have content or path or both')\n }\n }\n}\n\nmodule.exports = dagBuilder\n","'use strict'\n\nconst errCode = require('err-code')\nconst uint8ArrayFromString = require('uint8arrays/from-string')\n\n/**\n * @typedef {import('../types').ChunkValidator} ChunkValidator\n */\n\n/**\n * @type {ChunkValidator}\n */\nasync function * validateChunks (source) {\n for await (const content of source) {\n if (content.length === undefined) {\n throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')\n }\n\n if (typeof content === 'string' || content instanceof String) {\n yield uint8ArrayFromString(content.toString())\n } else if (Array.isArray(content)) {\n yield Uint8Array.from(content)\n } else if (content instanceof Uint8Array) {\n yield content\n } else {\n throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT')\n }\n }\n}\n\nmodule.exports = validateChunks\n","'use strict'\n\nconst { encode, prepare } = require('@ipld/dag-pb')\nconst { UnixFS } = require('ipfs-unixfs')\nconst Dir = require('./dir')\nconst persist = require('./utils/persist')\n\n/**\n * @typedef {import('./types').ImporterOptions} ImporterOptions\n * @typedef {import('./types').ImportResult} ImportResult\n * @typedef {import('./types').InProgressImportResult} InProgressImportResult\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('./dir').DirProps} DirProps\n * @typedef {import('@ipld/dag-pb').PBNode} PBNode\n * @typedef {import('@ipld/dag-pb').PBLink} PBLink\n */\n\nclass DirFlat extends Dir {\n /**\n * @param {DirProps} props\n * @param {ImporterOptions} options\n */\n constructor (props, options) {\n super(props, options)\n\n /** @type {{ [key: string]: InProgressImportResult | Dir }} */\n this._children = {}\n }\n\n /**\n * @param {string} name\n * @param {InProgressImportResult | Dir} value\n */\n async put (name, value) {\n this.cid = undefined\n this.size = undefined\n\n this._children[name] = value\n }\n\n /**\n * @param {string} name\n */\n get (name) {\n return Promise.resolve(this._children[name])\n }\n\n childCount () {\n return Object.keys(this._children).length\n }\n\n directChildrenCount () {\n return this.childCount()\n }\n\n onlyChild () {\n return this._children[Object.keys(this._children)[0]]\n }\n\n async * eachChildSeries () {\n const keys = Object.keys(this._children)\n\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i]\n\n yield {\n key: key,\n child: this._children[key]\n }\n }\n }\n\n /**\n * @param {Blockstore} block\n * @returns {AsyncIterable}\n */\n async * flush (block) {\n const children = Object.keys(this._children)\n const links = []\n\n for (let i = 0; i < children.length; i++) {\n let child = this._children[children[i]]\n\n if (child instanceof Dir) {\n for await (const entry of child.flush(block)) {\n child = entry\n\n yield child\n }\n }\n\n if (child.size != null && child.cid) {\n links.push({\n Name: children[i],\n Tsize: child.size,\n Hash: child.cid\n })\n }\n }\n\n const unixfs = new UnixFS({\n type: 'directory',\n mtime: this.mtime,\n mode: this.mode\n })\n\n /** @type {PBNode} */\n const node = { Data: unixfs.marshal(), Links: links }\n const buffer = encode(prepare(node))\n const cid = await persist(buffer, block, this.options)\n const size = buffer.length + node.Links.reduce(\n /**\n * @param {number} 
acc\n * @param {PBLink} curr\n */\n (acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize),\n 0)\n\n this.cid = cid\n this.size = size\n\n yield {\n cid,\n unixfs,\n path: this.path,\n size\n }\n }\n}\n\nmodule.exports = DirFlat\n","'use strict'\n\nconst { encode, prepare } = require('@ipld/dag-pb')\nconst { UnixFS } = require('ipfs-unixfs')\nconst Dir = require('./dir')\nconst persist = require('./utils/persist')\nconst { createHAMT, Bucket } = require('hamt-sharding')\n\n/**\n * @typedef {import('./types').ImporterOptions} ImporterOptions\n * @typedef {import('./types').ImportResult} ImportResult\n * @typedef {import('./types').InProgressImportResult} InProgressImportResult\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n */\n\n/**\n * @typedef {import('./dir').DirProps} DirProps\n */\n\nclass DirSharded extends Dir {\n /**\n * @param {DirProps} props\n * @param {ImporterOptions} options\n */\n constructor (props, options) {\n super(props, options)\n\n /** @type {Bucket} */\n this._bucket = createHAMT({\n hashFn: options.hamtHashFn,\n bits: options.hamtBucketBits\n })\n }\n\n /**\n * @param {string} name\n * @param {InProgressImportResult | Dir} value\n */\n async put (name, value) {\n await this._bucket.put(name, value)\n }\n\n /**\n * @param {string} name\n */\n get (name) {\n return this._bucket.get(name)\n }\n\n childCount () {\n return this._bucket.leafCount()\n }\n\n directChildrenCount () {\n return this._bucket.childrenCount()\n }\n\n onlyChild () {\n return this._bucket.onlyChild()\n }\n\n async * eachChildSeries () {\n for await (const { key, value } of this._bucket.eachLeafSeries()) {\n yield {\n key,\n child: value\n }\n }\n }\n\n /**\n * @param {Blockstore} blockstore\n * @returns {AsyncIterable}\n */\n async * flush (blockstore) {\n for await (const entry of flush(this._bucket, blockstore, this, this.options)) {\n yield {\n ...entry,\n path: this.path\n }\n }\n }\n}\n\nmodule.exports = DirSharded\n\n/**\n * @param {Bucket>} bucket\n * @param {Blockstore} blockstore\n * @param {*} shardRoot\n * @param {ImporterOptions} options\n * @returns {AsyncIterable}\n */\nasync function * flush (bucket, blockstore, shardRoot, options) {\n const children = bucket._children\n const links = []\n let childrenSize = 0\n\n for (let i = 0; i < children.length; i++) {\n const child = children.get(i)\n\n if (!child) {\n continue\n }\n\n const labelPrefix = i.toString(16).toUpperCase().padStart(2, '0')\n\n if (child instanceof Bucket) {\n let shard\n\n for await (const subShard of await flush(child, blockstore, null, options)) {\n shard = subShard\n }\n\n if (!shard) {\n throw new Error('Could not flush sharded directory, no subshard found')\n }\n\n links.push({\n Name: labelPrefix,\n Tsize: shard.size,\n Hash: shard.cid\n })\n childrenSize += shard.size\n } else if (typeof child.value.flush === 'function') {\n const dir = child.value\n let flushedDir\n\n for await (const entry of dir.flush(blockstore)) {\n flushedDir = entry\n\n yield flushedDir\n }\n\n const label = labelPrefix + child.key\n links.push({\n Name: label,\n Tsize: flushedDir.size,\n Hash: flushedDir.cid\n })\n\n childrenSize += flushedDir.size\n } else {\n const value = child.value\n\n if (!value.cid) {\n continue\n }\n\n const label = labelPrefix + child.key\n const size = value.size\n\n links.push({\n Name: label,\n Tsize: size,\n Hash: value.cid\n })\n childrenSize += size\n }\n }\n\n // go-ipfs uses little endian, that's why we have to\n // reverse the bit field before storing it\n const data = 
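// ---------------------------------------------------------------------------
// Editor's note (illustrative, not part of the vendored bundle): in the
// sharded-directory flush below, each link name starts with the child's
// bucket position rendered as two uppercase hex digits, followed by the
// original key for leaf entries, matching the labelling go-ipfs uses for
// HAMT shards:
const demoShardLabel = (position, key = '') =>
  position.toString(16).toUpperCase().padStart(2, '0') + key
// demoShardLabel(10, 'file.txt') -> '0Afile.txt'; demoShardLabel(3) -> '03'
// ---------------------------------------------------------------------------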
Uint8Array.from(children.bitField().reverse())\n const dir = new UnixFS({\n type: 'hamt-sharded-directory',\n data,\n fanout: bucket.tableSize(),\n hashType: options.hamtHashCode,\n mtime: shardRoot && shardRoot.mtime,\n mode: shardRoot && shardRoot.mode\n })\n\n const node = {\n Data: dir.marshal(),\n Links: links\n }\n const buffer = encode(prepare(node))\n const cid = await persist(buffer, blockstore, options)\n const size = buffer.length + childrenSize\n\n yield {\n cid,\n unixfs: dir,\n size\n }\n}\n","'use strict'\n\n/**\n * @typedef {import('./types').ImporterOptions} ImporterOptions\n * @typedef {import('./types').ImportResult} ImportResult\n * @typedef {import('./types').InProgressImportResult} InProgressImportResult\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('multiformats/cid').CID} CID\n * @typedef {object} DirProps\n * @property {boolean} root\n * @property {boolean} dir\n * @property {string} path\n * @property {boolean} dirty\n * @property {boolean} flat\n * @property {Dir} [parent]\n * @property {string} [parentKey]\n * @property {import('ipfs-unixfs').UnixFS} [unixfs]\n * @property {number} [mode]\n * @property {import('ipfs-unixfs').Mtime} [mtime]\n */\nclass Dir {\n /**\n *\n * @param {DirProps} props\n * @param {ImporterOptions} options\n */\n constructor (props, options) {\n this.options = options || {}\n\n this.root = props.root\n this.dir = props.dir\n this.path = props.path\n this.dirty = props.dirty\n this.flat = props.flat\n this.parent = props.parent\n this.parentKey = props.parentKey\n this.unixfs = props.unixfs\n this.mode = props.mode\n this.mtime = props.mtime\n\n /** @type {CID | undefined} */\n this.cid = undefined\n /** @type {number | undefined} */\n this.size = undefined\n }\n\n /**\n * @param {string} name\n * @param {InProgressImportResult | Dir} value\n */\n async put (name, value) { }\n\n /**\n * @param {string} name\n * @returns {Promise}\n */\n get (name) {\n return Promise.resolve(this)\n }\n\n /**\n * @returns {AsyncIterable<{ key: string, child: InProgressImportResult | Dir}>}\n */\n async * eachChildSeries () { }\n\n /**\n * @param {Blockstore} blockstore\n * @returns {AsyncIterable}\n */\n async * flush (blockstore) { }\n}\n\nmodule.exports = Dir\n","'use strict'\n\nconst DirSharded = require('./dir-sharded')\nconst DirFlat = require('./dir-flat')\n\n/**\n * @typedef {import('./dir')} Dir\n * @typedef {import('./types').ImporterOptions} ImporterOptions\n */\n\n/**\n * @param {Dir | null} child\n * @param {Dir} dir\n * @param {number} threshold\n * @param {ImporterOptions} options\n * @returns {Promise}\n */\nmodule.exports = async function flatToShard (child, dir, threshold, options) {\n let newDir = dir\n\n if (dir instanceof DirFlat && dir.directChildrenCount() >= threshold) {\n newDir = await convertToShard(dir, options)\n }\n\n const parent = newDir.parent\n\n if (parent) {\n if (newDir !== dir) {\n if (child) {\n child.parent = newDir\n }\n\n if (!newDir.parentKey) {\n throw new Error('No parent key found')\n }\n\n await parent.put(newDir.parentKey, newDir)\n }\n\n return flatToShard(newDir, parent, threshold, options)\n }\n\n // @ts-ignore\n return newDir\n}\n\n/**\n * @param {DirFlat} oldDir\n * @param {ImporterOptions} options\n */\nasync function convertToShard (oldDir, options) {\n const newDir = new DirSharded({\n root: oldDir.root,\n dir: true,\n parent: oldDir.parent,\n parentKey: oldDir.parentKey,\n path: oldDir.path,\n dirty: oldDir.dirty,\n flat: false,\n mtime: oldDir.mtime,\n mode: 
oldDir.mode\n }, options)\n\n for await (const { key, child } of oldDir.eachChildSeries()) {\n await newDir.put(key, child)\n }\n\n return newDir\n}\n","'use strict'\n\nconst parallelBatch = require('it-parallel-batch')\nconst defaultOptions = require('./options')\n\n/**\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {import('./types').ImportCandidate} ImportCandidate\n * @typedef {import('./types').UserImporterOptions} UserImporterOptions\n * @typedef {import('./types').ImporterOptions} ImporterOptions\n * @typedef {import('./types').Directory} Directory\n * @typedef {import('./types').File} File\n * @typedef {import('./types').ImportResult} ImportResult\n *\n * @typedef {import('./types').Chunker} Chunker\n * @typedef {import('./types').DAGBuilder} DAGBuilder\n * @typedef {import('./types').TreeBuilder} TreeBuilder\n * @typedef {import('./types').BufferImporter} BufferImporter\n * @typedef {import('./types').ChunkValidator} ChunkValidator\n * @typedef {import('./types').Reducer} Reducer\n * @typedef {import('./types').ProgressHandler} ProgressHandler\n */\n\n/**\n * @param {AsyncIterable | Iterable | ImportCandidate} source\n * @param {Blockstore} blockstore\n * @param {UserImporterOptions} options\n */\nasync function * importer (source, blockstore, options = {}) {\n const opts = defaultOptions(options)\n\n let dagBuilder\n\n if (typeof options.dagBuilder === 'function') {\n dagBuilder = options.dagBuilder\n } else {\n dagBuilder = require('./dag-builder')\n }\n\n let treeBuilder\n\n if (typeof options.treeBuilder === 'function') {\n treeBuilder = options.treeBuilder\n } else {\n treeBuilder = require('./tree-builder')\n }\n\n /** @type {AsyncIterable | Iterable} */\n let candidates\n\n if (Symbol.asyncIterator in source || Symbol.iterator in source) {\n // @ts-ignore\n candidates = source\n } else {\n // @ts-ignore\n candidates = [source]\n }\n\n for await (const entry of treeBuilder(parallelBatch(dagBuilder(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) {\n yield {\n cid: entry.cid,\n path: entry.path,\n unixfs: entry.unixfs,\n size: entry.size\n }\n }\n}\n\nmodule.exports = {\n importer\n}\n","'use strict'\n\nconst mergeOptions = require('merge-options').bind({ ignoreUndefined: true })\nconst { sha256 } = require('multiformats/hashes/sha2')\n// @ts-ignore - no types available\nconst mur = require('murmurhash3js-revisited')\nconst uint8ArrayFromString = require('uint8arrays/from-string')\n\n/**\n * @param {Uint8Array} buf\n */\nasync function hamtHashFn (buf) {\n return uint8ArrayFromString(mur.x64.hash128(buf), 'base16')\n // Murmur3 outputs 128 bit but, accidentally, IPFS Go's\n // implementation only uses the first 64, so we must do the same\n // for parity..\n .slice(0, 8)\n // Invert buffer because that's how Go impl does it\n .reverse()\n}\n\n/**\n * @typedef {import('./types').UserImporterOptions} UserImporterOptions\n * @typedef {import('./types').ImporterOptions} ImporterOptions\n */\n\n/**\n * @type {ImporterOptions}\n */\nconst defaultOptions = {\n chunker: 'fixed',\n strategy: 'balanced', // 'flat', 'trickle'\n rawLeaves: false,\n onlyHash: false,\n reduceSingleLeafToSelf: true,\n hasher: sha256,\n leafType: 'file', // 'raw'\n cidVersion: 0,\n progress: () => () => {},\n shardSplitThreshold: 1000,\n fileImportConcurrency: 50,\n blockWriteConcurrency: 10,\n minChunkSize: 262144,\n maxChunkSize: 262144,\n avgChunkSize: 262144,\n window: 16,\n // FIXME: This number is too big for JavaScript\n // 
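// ---------------------------------------------------------------------------
// Editor's sketch (illustrative, not part of the vendored bundle): calling
// the `importer` export above. The blockstore wiring is an assumption for the
// demo; any interface-blockstore implementation works, e.g. blockstore-core's
// MemoryBlockstore.
async function demoImport () {
  const { MemoryBlockstore } = require('blockstore-core/memory') // assumed dependency
  const blockstore = new MemoryBlockstore()
  const candidates = [
    { path: 'hello.txt', content: new TextEncoder().encode('hello world') }
  ]
  const { importer } = require('ipfs-unixfs-importer') // the module bundled above
  for await (const entry of importer(candidates, blockstore, { cidVersion: 1 })) {
    console.log(entry.path, entry.cid.toString(), entry.size)
  }
}
// ---------------------------------------------------------------------------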
https://github.com/ipfs/go-ipfs-chunker/blob/d0125832512163708c0804a3cda060e21acddae4/rabin.go#L11\n polynomial: 17437180132763653, // eslint-disable-line no-loss-of-precision\n maxChildrenPerNode: 174,\n layerRepeat: 4,\n wrapWithDirectory: false,\n recursive: false,\n hidden: false,\n timeout: undefined,\n hamtHashFn,\n hamtHashCode: 0x22,\n hamtBucketBits: 8\n}\n\n/**\n * @param {UserImporterOptions} options\n * @returns {ImporterOptions}\n */\nmodule.exports = function (options = {}) {\n return mergeOptions(defaultOptions, options)\n}\n","'use strict'\n\nconst DirFlat = require('./dir-flat')\nconst flatToShard = require('./flat-to-shard')\nconst Dir = require('./dir')\nconst toPathComponents = require('./utils/to-path-components')\n\n/**\n * @typedef {import('./types').ImportResult} ImportResult\n * @typedef {import('./types').InProgressImportResult} InProgressImportResult\n * @typedef {import('./types').ImporterOptions} ImporterOptions\n * @typedef {import('interface-blockstore').Blockstore} Blockstore\n * @typedef {(source: AsyncIterable, blockstore: Blockstore, options: ImporterOptions) => AsyncIterable} TreeBuilder\n */\n\n/**\n * @param {InProgressImportResult} elem\n * @param {Dir} tree\n * @param {ImporterOptions} options\n */\nasync function addToTree (elem, tree, options) {\n const pathElems = toPathComponents(elem.path || '')\n const lastIndex = pathElems.length - 1\n let parent = tree\n let currentPath = ''\n\n for (let i = 0; i < pathElems.length; i++) {\n const pathElem = pathElems[i]\n\n currentPath += `${currentPath ? '/' : ''}${pathElem}`\n\n const last = (i === lastIndex)\n parent.dirty = true\n parent.cid = undefined\n parent.size = undefined\n\n if (last) {\n await parent.put(pathElem, elem)\n tree = await flatToShard(null, parent, options.shardSplitThreshold, options)\n } else {\n let dir = await parent.get(pathElem)\n\n if (!dir || !(dir instanceof Dir)) {\n dir = new DirFlat({\n root: false,\n dir: true,\n parent: parent,\n parentKey: pathElem,\n path: currentPath,\n dirty: true,\n flat: true,\n mtime: dir && dir.unixfs && dir.unixfs.mtime,\n mode: dir && dir.unixfs && dir.unixfs.mode\n }, options)\n }\n\n await parent.put(pathElem, dir)\n\n parent = dir\n }\n }\n\n return tree\n}\n\n/**\n * @param {Dir | InProgressImportResult} tree\n * @param {Blockstore} blockstore\n */\nasync function * flushAndYield (tree, blockstore) {\n if (!(tree instanceof Dir)) {\n if (tree && tree.unixfs && tree.unixfs.isDirectory()) {\n yield tree\n }\n\n return\n }\n\n yield * tree.flush(blockstore)\n}\n\n/**\n * @type {TreeBuilder}\n */\nasync function * treeBuilder (source, block, options) {\n /** @type {Dir} */\n let tree = new DirFlat({\n root: true,\n dir: true,\n path: '',\n dirty: true,\n flat: true\n }, options)\n\n for await (const entry of source) {\n if (!entry) {\n continue\n }\n\n tree = await addToTree(entry, tree, options)\n\n if (!entry.unixfs || !entry.unixfs.isDirectory()) {\n yield entry\n }\n }\n\n if (options.wrapWithDirectory) {\n yield * flushAndYield(tree, block)\n } else {\n for await (const unwrapped of tree.eachChildSeries()) {\n if (!unwrapped) {\n continue\n }\n\n yield * flushAndYield(unwrapped.child, block)\n }\n }\n}\n\nmodule.exports = treeBuilder\n","'use strict'\n\nconst { CID } = require('multiformats/cid')\nconst dagPb = require('@ipld/dag-pb')\nconst { sha256 } = require('multiformats/hashes/sha2')\n\n/**\n * @param {Uint8Array} buffer\n * @param {import('interface-blockstore').Blockstore} blockstore\n * @param {import('../types').PersistOptions} 
options\n */\nconst persist = async (buffer, blockstore, options) => {\n if (!options.codec) {\n options.codec = dagPb\n }\n\n if (!options.hasher) {\n options.hasher = sha256\n }\n\n if (options.cidVersion === undefined) {\n options.cidVersion = 1\n }\n\n if (options.codec === dagPb && options.hasher !== sha256) {\n options.cidVersion = 1\n }\n\n const multihash = await options.hasher.digest(buffer)\n const cid = CID.create(options.cidVersion, options.codec.code, multihash)\n\n if (!options.onlyHash) {\n await blockstore.put(cid, buffer, {\n signal: options.signal\n })\n }\n\n return cid\n}\n\nmodule.exports = persist\n","'use strict'\n\nconst toPathComponents = (path = '') => {\n // split on / unless escaped with \\\n return (path\n .trim()\n .match(/([^\\\\^/]|\\\\\\/)+/g) || [])\n .filter(Boolean)\n}\n\nmodule.exports = toPathComponents\n","'use strict'\n\nconst {\n Data: PBData\n} = require('./unixfs')\nconst errcode = require('err-code')\n\n/**\n * @typedef {import('./types').Mtime} Mtime\n * @typedef {import('./types').MtimeLike} MtimeLike\n */\n\nconst types = [\n 'raw',\n 'directory',\n 'file',\n 'metadata',\n 'symlink',\n 'hamt-sharded-directory'\n]\n\nconst dirTypes = [\n 'directory',\n 'hamt-sharded-directory'\n]\n\nconst DEFAULT_FILE_MODE = parseInt('0644', 8)\nconst DEFAULT_DIRECTORY_MODE = parseInt('0755', 8)\n\n/**\n * @param {string | number | undefined} [mode]\n */\nfunction parseMode (mode) {\n if (mode == null) {\n return undefined\n }\n\n if (typeof mode === 'number') {\n return mode & 0xFFF\n }\n\n mode = mode.toString()\n\n if (mode.substring(0, 1) === '0') {\n // octal string\n return parseInt(mode, 8) & 0xFFF\n }\n\n // decimal string\n return parseInt(mode, 10) & 0xFFF\n}\n\n/**\n * @param {any} input\n */\nfunction parseMtime (input) {\n if (input == null) {\n return undefined\n }\n\n /** @type {Mtime | undefined} */\n let mtime\n\n // { secs, nsecs }\n if (input.secs != null) {\n mtime = {\n secs: input.secs,\n nsecs: input.nsecs\n }\n }\n\n // UnixFS TimeSpec\n if (input.Seconds != null) {\n mtime = {\n secs: input.Seconds,\n nsecs: input.FractionalNanoseconds\n }\n }\n\n // process.hrtime()\n if (Array.isArray(input)) {\n mtime = {\n secs: input[0],\n nsecs: input[1]\n }\n }\n\n // Javascript Date\n if (input instanceof Date) {\n const ms = input.getTime()\n const secs = Math.floor(ms / 1000)\n\n mtime = {\n secs: secs,\n nsecs: (ms - (secs * 1000)) * 1000\n }\n }\n\n /*\n TODO: https://github.com/ipfs/aegir/issues/487\n\n // process.hrtime.bigint()\n if (input instanceof BigInt) {\n const secs = input / BigInt(1e9)\n const nsecs = input - (secs * BigInt(1e9))\n\n mtime = {\n secs: parseInt(secs.toString()),\n nsecs: parseInt(nsecs.toString())\n }\n }\n */\n\n if (!Object.prototype.hasOwnProperty.call(mtime, 'secs')) {\n return undefined\n }\n\n if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) {\n throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS')\n }\n\n return mtime\n}\n\nclass Data {\n /**\n * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md\n *\n * @param {Uint8Array} marshaled\n */\n static unmarshal (marshaled) {\n const message = PBData.decode(marshaled)\n const decoded = PBData.toObject(message, {\n defaults: false,\n arrays: true,\n longs: Number,\n objects: false\n })\n\n const data = new Data({\n type: types[decoded.Type],\n data: decoded.Data,\n blockSizes: decoded.blocksizes,\n mode: decoded.mode,\n mtime: decoded.mtime\n ? 
{\n secs: decoded.mtime.Seconds,\n nsecs: decoded.mtime.FractionalNanoseconds\n }\n : undefined\n })\n\n // make sure we honour the original mode\n data._originalMode = decoded.mode || 0\n\n return data\n }\n\n /**\n * @param {object} [options]\n * @param {string} [options.type='file']\n * @param {Uint8Array} [options.data]\n * @param {number[]} [options.blockSizes]\n * @param {number} [options.hashType]\n * @param {number} [options.fanout]\n * @param {MtimeLike | null} [options.mtime]\n * @param {number | string} [options.mode]\n */\n constructor (options = {\n type: 'file'\n }) {\n const {\n type,\n data,\n blockSizes,\n hashType,\n fanout,\n mtime,\n mode\n } = options\n\n if (type && !types.includes(type)) {\n throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')\n }\n\n this.type = type || 'file'\n this.data = data\n this.hashType = hashType\n this.fanout = fanout\n\n /** @type {number[]} */\n this.blockSizes = blockSizes || []\n this._originalMode = 0\n this.mode = parseMode(mode)\n\n if (mtime) {\n this.mtime = parseMtime(mtime)\n\n if (this.mtime && !this.mtime.nsecs) {\n this.mtime.nsecs = 0\n }\n }\n }\n\n /**\n * @param {number | undefined} mode\n */\n set mode (mode) {\n this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE\n\n const parsedMode = parseMode(mode)\n\n if (parsedMode !== undefined) {\n this._mode = parsedMode\n }\n }\n\n /**\n * @returns {number | undefined}\n */\n get mode () {\n return this._mode\n }\n\n isDirectory () {\n return Boolean(this.type && dirTypes.includes(this.type))\n }\n\n /**\n * @param {number} size\n */\n addBlockSize (size) {\n this.blockSizes.push(size)\n }\n\n /**\n * @param {number} index\n */\n removeBlockSize (index) {\n this.blockSizes.splice(index, 1)\n }\n\n /**\n * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else\n */\n fileSize () {\n if (this.isDirectory()) {\n // dirs don't have file size\n return 0\n }\n\n let sum = 0\n this.blockSizes.forEach((size) => {\n sum += size\n })\n\n if (this.data) {\n sum += this.data.length\n }\n\n return sum\n }\n\n /**\n * encode to protobuf Uint8Array\n */\n marshal () {\n let type\n\n switch (this.type) {\n case 'raw': type = PBData.DataType.Raw; break\n case 'directory': type = PBData.DataType.Directory; break\n case 'file': type = PBData.DataType.File; break\n case 'metadata': type = PBData.DataType.Metadata; break\n case 'symlink': type = PBData.DataType.Symlink; break\n case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break\n default:\n throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE')\n }\n\n let data = this.data\n\n if (!this.data || !this.data.length) {\n data = undefined\n }\n\n let mode\n\n if (this.mode != null) {\n mode = (this._originalMode & 0xFFFFF000) | (parseMode(this.mode) || 0)\n\n if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {\n mode = undefined\n }\n\n if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {\n mode = undefined\n }\n }\n\n let mtime\n\n if (this.mtime != null) {\n const parsed = parseMtime(this.mtime)\n\n if (parsed) {\n mtime = {\n Seconds: parsed.secs,\n FractionalNanoseconds: parsed.nsecs\n }\n\n if (mtime.FractionalNanoseconds === 0) {\n delete mtime.FractionalNanoseconds\n }\n }\n }\n\n const pbData = {\n Type: type,\n Data: data,\n filesize: this.isDirectory() ? 
undefined : this.fileSize(),\n blocksizes: this.blockSizes,\n hashType: this.hashType,\n fanout: this.fanout,\n mode,\n mtime\n }\n\n return PBData.encode(pbData).finish()\n }\n}\n\nmodule.exports = {\n UnixFS: Data,\n parseMode,\n parseMtime\n}\n","/*eslint-disable*/\n\"use strict\";\n\nvar $protobuf = require(\"protobufjs/minimal\");\n\n// Common aliases\nvar $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;\n\n// Exported root namespace\nvar $root = $protobuf.roots[\"ipfs-unixfs\"] || ($protobuf.roots[\"ipfs-unixfs\"] = {});\n\n$root.Data = (function() {\n\n /**\n * Properties of a Data.\n * @exports IData\n * @interface IData\n * @property {Data.DataType} Type Data Type\n * @property {Uint8Array|null} [Data] Data Data\n * @property {number|null} [filesize] Data filesize\n * @property {Array.|null} [blocksizes] Data blocksizes\n * @property {number|null} [hashType] Data hashType\n * @property {number|null} [fanout] Data fanout\n * @property {number|null} [mode] Data mode\n * @property {IUnixTime|null} [mtime] Data mtime\n */\n\n /**\n * Constructs a new Data.\n * @exports Data\n * @classdesc Represents a Data.\n * @implements IData\n * @constructor\n * @param {IData=} [p] Properties to set\n */\n function Data(p) {\n this.blocksizes = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * Data Type.\n * @member {Data.DataType} Type\n * @memberof Data\n * @instance\n */\n Data.prototype.Type = 0;\n\n /**\n * Data Data.\n * @member {Uint8Array} Data\n * @memberof Data\n * @instance\n */\n Data.prototype.Data = $util.newBuffer([]);\n\n /**\n * Data filesize.\n * @member {number} filesize\n * @memberof Data\n * @instance\n */\n Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data blocksizes.\n * @member {Array.} blocksizes\n * @memberof Data\n * @instance\n */\n Data.prototype.blocksizes = $util.emptyArray;\n\n /**\n * Data hashType.\n * @member {number} hashType\n * @memberof Data\n * @instance\n */\n Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data fanout.\n * @member {number} fanout\n * @memberof Data\n * @instance\n */\n Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0,0,true) : 0;\n\n /**\n * Data mode.\n * @member {number} mode\n * @memberof Data\n * @instance\n */\n Data.prototype.mode = 0;\n\n /**\n * Data mtime.\n * @member {IUnixTime|null|undefined} mtime\n * @memberof Data\n * @instance\n */\n Data.prototype.mtime = null;\n\n /**\n * Encodes the specified Data message. 
Does not implicitly {@link Data.verify|verify} messages.\n * @function encode\n * @memberof Data\n * @static\n * @param {IData} m Data message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Data.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n w.uint32(8).int32(m.Type);\n if (m.Data != null && Object.hasOwnProperty.call(m, \"Data\"))\n w.uint32(18).bytes(m.Data);\n if (m.filesize != null && Object.hasOwnProperty.call(m, \"filesize\"))\n w.uint32(24).uint64(m.filesize);\n if (m.blocksizes != null && m.blocksizes.length) {\n for (var i = 0; i < m.blocksizes.length; ++i)\n w.uint32(32).uint64(m.blocksizes[i]);\n }\n if (m.hashType != null && Object.hasOwnProperty.call(m, \"hashType\"))\n w.uint32(40).uint64(m.hashType);\n if (m.fanout != null && Object.hasOwnProperty.call(m, \"fanout\"))\n w.uint32(48).uint64(m.fanout);\n if (m.mode != null && Object.hasOwnProperty.call(m, \"mode\"))\n w.uint32(56).uint32(m.mode);\n if (m.mtime != null && Object.hasOwnProperty.call(m, \"mtime\"))\n $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim();\n return w;\n };\n\n /**\n * Decodes a Data message from the specified reader or buffer.\n * @function decode\n * @memberof Data\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {Data} Data\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Data.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.Data();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.Type = r.int32();\n break;\n case 2:\n m.Data = r.bytes();\n break;\n case 3:\n m.filesize = r.uint64();\n break;\n case 4:\n if (!(m.blocksizes && m.blocksizes.length))\n m.blocksizes = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.blocksizes.push(r.uint64());\n } else\n m.blocksizes.push(r.uint64());\n break;\n case 5:\n m.hashType = r.uint64();\n break;\n case 6:\n m.fanout = r.uint64();\n break;\n case 7:\n m.mode = r.uint32();\n break;\n case 8:\n m.mtime = $root.UnixTime.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n if (!m.hasOwnProperty(\"Type\"))\n throw $util.ProtocolError(\"missing required 'Type'\", { instance: m });\n return m;\n };\n\n /**\n * Creates a Data message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof Data\n * @static\n * @param {Object.} d Plain object\n * @returns {Data} Data\n */\n Data.fromObject = function fromObject(d) {\n if (d instanceof $root.Data)\n return d;\n var m = new $root.Data();\n switch (d.Type) {\n case \"Raw\":\n case 0:\n m.Type = 0;\n break;\n case \"Directory\":\n case 1:\n m.Type = 1;\n break;\n case \"File\":\n case 2:\n m.Type = 2;\n break;\n case \"Metadata\":\n case 3:\n m.Type = 3;\n break;\n case \"Symlink\":\n case 4:\n m.Type = 4;\n break;\n case \"HAMTShard\":\n case 5:\n m.Type = 5;\n break;\n }\n if (d.Data != null) {\n if (typeof d.Data === \"string\")\n $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0);\n else if (d.Data.length)\n m.Data = d.Data;\n }\n if (d.filesize != null) {\n if ($util.Long)\n (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true;\n else if (typeof d.filesize === \"string\")\n m.filesize = parseInt(d.filesize, 10);\n else if (typeof d.filesize === \"number\")\n m.filesize = d.filesize;\n else if (typeof d.filesize === \"object\")\n m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true);\n }\n if (d.blocksizes) {\n if (!Array.isArray(d.blocksizes))\n throw TypeError(\".Data.blocksizes: array expected\");\n m.blocksizes = [];\n for (var i = 0; i < d.blocksizes.length; ++i) {\n if ($util.Long)\n (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true;\n else if (typeof d.blocksizes[i] === \"string\")\n m.blocksizes[i] = parseInt(d.blocksizes[i], 10);\n else if (typeof d.blocksizes[i] === \"number\")\n m.blocksizes[i] = d.blocksizes[i];\n else if (typeof d.blocksizes[i] === \"object\")\n m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true);\n }\n }\n if (d.hashType != null) {\n if ($util.Long)\n (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true;\n else if (typeof d.hashType === \"string\")\n m.hashType = parseInt(d.hashType, 10);\n else if (typeof d.hashType === \"number\")\n m.hashType = d.hashType;\n else if (typeof d.hashType === \"object\")\n m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true);\n }\n if (d.fanout != null) {\n if ($util.Long)\n (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true;\n else if (typeof d.fanout === \"string\")\n m.fanout = parseInt(d.fanout, 10);\n else if (typeof d.fanout === \"number\")\n m.fanout = d.fanout;\n else if (typeof d.fanout === \"object\")\n m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true);\n }\n if (d.mode != null) {\n m.mode = d.mode >>> 0;\n }\n if (d.mtime != null) {\n if (typeof d.mtime !== \"object\")\n throw TypeError(\".Data.mtime: object expected\");\n m.mtime = $root.UnixTime.fromObject(d.mtime);\n }\n return m;\n };\n\n /**\n * Creates a plain object from a Data message. Also converts values to other types if specified.\n * @function toObject\n * @memberof Data\n * @static\n * @param {Data} m Data\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n Data.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.arrays || o.defaults) {\n d.blocksizes = [];\n }\n if (o.defaults) {\n d.Type = o.enums === String ? 
\"Raw\" : 0;\n if (o.bytes === String)\n d.Data = \"\";\n else {\n d.Data = [];\n if (o.bytes !== Array)\n d.Data = $util.newBuffer(d.Data);\n }\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.filesize = o.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.hashType = o.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var n = new $util.Long(0, 0, true);\n d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.fanout = o.longs === String ? \"0\" : 0;\n d.mode = 0;\n d.mtime = null;\n }\n if (m.Type != null && m.hasOwnProperty(\"Type\")) {\n d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type;\n }\n if (m.Data != null && m.hasOwnProperty(\"Data\")) {\n d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data;\n }\n if (m.filesize != null && m.hasOwnProperty(\"filesize\")) {\n if (typeof m.filesize === \"number\")\n d.filesize = o.longs === String ? String(m.filesize) : m.filesize;\n else\n d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize;\n }\n if (m.blocksizes && m.blocksizes.length) {\n d.blocksizes = [];\n for (var j = 0; j < m.blocksizes.length; ++j) {\n if (typeof m.blocksizes[j] === \"number\")\n d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j];\n else\n d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j];\n }\n }\n if (m.hashType != null && m.hasOwnProperty(\"hashType\")) {\n if (typeof m.hashType === \"number\")\n d.hashType = o.longs === String ? String(m.hashType) : m.hashType;\n else\n d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType;\n }\n if (m.fanout != null && m.hasOwnProperty(\"fanout\")) {\n if (typeof m.fanout === \"number\")\n d.fanout = o.longs === String ? String(m.fanout) : m.fanout;\n else\n d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? 
new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout;\n }\n if (m.mode != null && m.hasOwnProperty(\"mode\")) {\n d.mode = m.mode;\n }\n if (m.mtime != null && m.hasOwnProperty(\"mtime\")) {\n d.mtime = $root.UnixTime.toObject(m.mtime, o);\n }\n return d;\n };\n\n /**\n * Converts this Data to JSON.\n * @function toJSON\n * @memberof Data\n * @instance\n * @returns {Object.} JSON object\n */\n Data.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * DataType enum.\n * @name Data.DataType\n * @enum {number}\n * @property {number} Raw=0 Raw value\n * @property {number} Directory=1 Directory value\n * @property {number} File=2 File value\n * @property {number} Metadata=3 Metadata value\n * @property {number} Symlink=4 Symlink value\n * @property {number} HAMTShard=5 HAMTShard value\n */\n Data.DataType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"Raw\"] = 0;\n values[valuesById[1] = \"Directory\"] = 1;\n values[valuesById[2] = \"File\"] = 2;\n values[valuesById[3] = \"Metadata\"] = 3;\n values[valuesById[4] = \"Symlink\"] = 4;\n values[valuesById[5] = \"HAMTShard\"] = 5;\n return values;\n })();\n\n return Data;\n})();\n\n$root.UnixTime = (function() {\n\n /**\n * Properties of an UnixTime.\n * @exports IUnixTime\n * @interface IUnixTime\n * @property {number} Seconds UnixTime Seconds\n * @property {number|null} [FractionalNanoseconds] UnixTime FractionalNanoseconds\n */\n\n /**\n * Constructs a new UnixTime.\n * @exports UnixTime\n * @classdesc Represents an UnixTime.\n * @implements IUnixTime\n * @constructor\n * @param {IUnixTime=} [p] Properties to set\n */\n function UnixTime(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * UnixTime Seconds.\n * @member {number} Seconds\n * @memberof UnixTime\n * @instance\n */\n UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * UnixTime FractionalNanoseconds.\n * @member {number} FractionalNanoseconds\n * @memberof UnixTime\n * @instance\n */\n UnixTime.prototype.FractionalNanoseconds = 0;\n\n /**\n * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages.\n * @function encode\n * @memberof UnixTime\n * @static\n * @param {IUnixTime} m UnixTime message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n UnixTime.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n w.uint32(8).int64(m.Seconds);\n if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, \"FractionalNanoseconds\"))\n w.uint32(21).fixed32(m.FractionalNanoseconds);\n return w;\n };\n\n /**\n * Decodes an UnixTime message from the specified reader or buffer.\n * @function decode\n * @memberof UnixTime\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {UnixTime} UnixTime\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n UnixTime.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.UnixTime();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.Seconds = r.int64();\n break;\n case 2:\n m.FractionalNanoseconds = r.fixed32();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n if (!m.hasOwnProperty(\"Seconds\"))\n throw $util.ProtocolError(\"missing required 'Seconds'\", { instance: m });\n return m;\n };\n\n /**\n * Creates an UnixTime message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof UnixTime\n * @static\n * @param {Object.} d Plain object\n * @returns {UnixTime} UnixTime\n */\n UnixTime.fromObject = function fromObject(d) {\n if (d instanceof $root.UnixTime)\n return d;\n var m = new $root.UnixTime();\n if (d.Seconds != null) {\n if ($util.Long)\n (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false;\n else if (typeof d.Seconds === \"string\")\n m.Seconds = parseInt(d.Seconds, 10);\n else if (typeof d.Seconds === \"number\")\n m.Seconds = d.Seconds;\n else if (typeof d.Seconds === \"object\")\n m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber();\n }\n if (d.FractionalNanoseconds != null) {\n m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0;\n }\n return m;\n };\n\n /**\n * Creates a plain object from an UnixTime message. Also converts values to other types if specified.\n * @function toObject\n * @memberof UnixTime\n * @static\n * @param {UnixTime} m UnixTime\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n UnixTime.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.defaults) {\n if ($util.Long) {\n var n = new $util.Long(0, 0, false);\n d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;\n } else\n d.Seconds = o.longs === String ? \"0\" : 0;\n d.FractionalNanoseconds = 0;\n }\n if (m.Seconds != null && m.hasOwnProperty(\"Seconds\")) {\n if (typeof m.Seconds === \"number\")\n d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds;\n else\n d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds;\n }\n if (m.FractionalNanoseconds != null && m.hasOwnProperty(\"FractionalNanoseconds\")) {\n d.FractionalNanoseconds = m.FractionalNanoseconds;\n }\n return d;\n };\n\n /**\n * Converts this UnixTime to JSON.\n * @function toJSON\n * @memberof UnixTime\n * @instance\n * @returns {Object.} JSON object\n */\n UnixTime.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n return UnixTime;\n})();\n\n$root.Metadata = (function() {\n\n /**\n * Properties of a Metadata.\n * @exports IMetadata\n * @interface IMetadata\n * @property {string|null} [MimeType] Metadata MimeType\n */\n\n /**\n * Constructs a new Metadata.\n * @exports Metadata\n * @classdesc Represents a Metadata.\n * @implements IMetadata\n * @constructor\n * @param {IMetadata=} [p] Properties to set\n */\n function Metadata(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n /**\n * Metadata MimeType.\n * @member {string} MimeType\n * @memberof Metadata\n * @instance\n */\n Metadata.prototype.MimeType = \"\";\n\n /**\n * Encodes the specified Metadata message. 
Does not implicitly {@link Metadata.verify|verify} messages.\n * @function encode\n * @memberof Metadata\n * @static\n * @param {IMetadata} m Metadata message or plain object to encode\n * @param {$protobuf.Writer} [w] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Metadata.encode = function encode(m, w) {\n if (!w)\n w = $Writer.create();\n if (m.MimeType != null && Object.hasOwnProperty.call(m, \"MimeType\"))\n w.uint32(10).string(m.MimeType);\n return w;\n };\n\n /**\n * Decodes a Metadata message from the specified reader or buffer.\n * @function decode\n * @memberof Metadata\n * @static\n * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from\n * @param {number} [l] Message length if known beforehand\n * @returns {Metadata} Metadata\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Metadata.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.MimeType = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n /**\n * Creates a Metadata message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof Metadata\n * @static\n * @param {Object.} d Plain object\n * @returns {Metadata} Metadata\n */\n Metadata.fromObject = function fromObject(d) {\n if (d instanceof $root.Metadata)\n return d;\n var m = new $root.Metadata();\n if (d.MimeType != null) {\n m.MimeType = String(d.MimeType);\n }\n return m;\n };\n\n /**\n * Creates a plain object from a Metadata message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof Metadata\n * @static\n * @param {Metadata} m Metadata\n * @param {$protobuf.IConversionOptions} [o] Conversion options\n * @returns {Object.} Plain object\n */\n Metadata.toObject = function toObject(m, o) {\n if (!o)\n o = {};\n var d = {};\n if (o.defaults) {\n d.MimeType = \"\";\n }\n if (m.MimeType != null && m.hasOwnProperty(\"MimeType\")) {\n d.MimeType = m.MimeType;\n }\n return d;\n };\n\n /**\n * Converts this Metadata to JSON.\n * @function toJSON\n * @memberof Metadata\n * @instance\n * @returns {Object.} JSON object\n */\n Metadata.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n return Metadata;\n})();\n\nmodule.exports = $root;\n","'use strict';\n\nmodule.exports = value => {\n\tif (Object.prototype.toString.call(value) !== '[object Object]') {\n\t\treturn false;\n\t}\n\n\tconst prototype = Object.getPrototypeOf(value);\n\treturn prototype === null || prototype === Object.prototype;\n};\n","'use strict'\n\n/**\n * Collects all values from an (async) iterable into an array and returns it.\n *\n * @template T\n * @param {AsyncIterable|Iterable} source\n */\nconst all = async (source) => {\n const arr = []\n\n for await (const entry of source) {\n arr.push(entry)\n }\n\n return arr\n}\n\nmodule.exports = all\n","'use strict'\n\n/**\n * Takes an (async) iterable that emits things and returns an async iterable that\n * emits those things in fixed-sized batches.\n *\n * @template T\n * @param {AsyncIterable|Iterable} source\n * @param {number} [size=1]\n * @returns {AsyncIterable}\n */\nasync function * batch (source, size = 1) {\n /** @type {T[]} */\n let things = []\n\n if (size < 1) {\n size = 1\n }\n\n for await (const thing of source) {\n things.push(thing)\n\n while (things.length >= size) {\n yield things.slice(0, size)\n\n things = things.slice(size)\n }\n }\n\n while (things.length) {\n yield things.slice(0, size)\n\n things = things.slice(size)\n }\n}\n\nmodule.exports = batch\n","'use strict'\n\n/**\n * Drains an (async) iterable discarding its' content and does not return\n * anything.\n *\n * @template T\n * @param {AsyncIterable|Iterable} source\n * @returns {Promise}\n */\nconst drain = async (source) => {\n for await (const _ of source) { } // eslint-disable-line no-unused-vars,no-empty\n}\n\nmodule.exports = drain\n","'use strict'\n\n/**\n * Filters the passed (async) iterable by using the filter function\n *\n * @template T\n * @param {AsyncIterable|Iterable} source\n * @param {function(T):boolean|Promise} fn\n */\nconst filter = async function * (source, fn) {\n for await (const entry of source) {\n if (await fn(entry)) {\n yield entry\n }\n }\n}\n\nmodule.exports = filter\n","'use strict'\n\nconst fs = require('fs').promises\nconst path = require('path')\nconst minimatch = require('minimatch')\n\n/**\n * @typedef {string} Glob\n * @typedef {Object} OptionsExt\n * @property {Glob[]} [ignore] - Glob patterns to ignore\n * @property {string} [cwd=process.cwd()]\n * @property {boolean} [absolute=false] - If true produces absolute paths\n * @property {boolean} [nodir] - If true yields file paths and skip directories\n *\n * @typedef {OptionsExt & minimatch.IOptions} Options\n */\n\n/**\n * Async iterable filename pattern matcher\n *\n * @param {string} dir\n * @param {string} pattern\n * @param {Options} [options]\n * @returns {AsyncIterable}\n */\nasync function * glob (dir, pattern, options = {}) {\n const 
absoluteDir = path.resolve(dir)\n const relativeDir = path.relative(options.cwd || process.cwd(), dir)\n\n const stats = await fs.stat(absoluteDir)\n\n if (stats.isDirectory()) {\n for await (const entry of _glob(absoluteDir, '', pattern, options)) {\n yield entry\n }\n\n return\n }\n\n if (minimatch(relativeDir, pattern, options)) {\n yield options.absolute ? absoluteDir : relativeDir\n }\n}\n\n/**\n * @param {string} base\n * @param {string} dir\n * @param {Glob} pattern\n * @param {Options} options\n * @returns {AsyncIterable}\n */\nasync function * _glob (base, dir, pattern, options) {\n for await (const entry of await fs.readdir(path.join(base, dir))) {\n const relativeEntryPath = path.join(dir, entry)\n const absoluteEntryPath = path.join(base, dir, entry)\n const stats = await fs.stat(absoluteEntryPath)\n let match = minimatch(relativeEntryPath, pattern, options)\n\n if (options.ignore && match && options.ignore.reduce((acc, curr) => {\n return acc || minimatch(relativeEntryPath, curr, options)\n }, false)) {\n match = false\n }\n\n if (match && !(stats.isDirectory() && options.nodir)) {\n yield options.absolute ? absoluteEntryPath : relativeEntryPath\n }\n\n if (stats.isDirectory()) {\n yield * _glob(base, relativeEntryPath, pattern, options)\n }\n }\n}\n\nmodule.exports = glob\n","'use strict'\n\n/**\n * Returns the last item of an (async) iterable, unless empty, in which case\n * return `undefined`.\n *\n * @template T\n * @param {AsyncIterable|Iterable} source\n */\nconst last = async (source) => {\n let res\n\n for await (const entry of source) {\n res = entry\n }\n\n return res\n}\n\nmodule.exports = last\n","'use strict'\n\n/**\n * Takes an (async) iterable and returns one with each item mapped by the passed\n * function.\n *\n * @template I,O\n * @param {AsyncIterable|Iterable} source\n * @param {function(I):O|Promise} func\n * @returns {AsyncIterable}\n */\nconst map = async function * (source, func) {\n for await (const val of source) {\n yield func(val)\n }\n}\n\nmodule.exports = map\n","'use strict'\n\nconst batch = require('it-batch')\n\n/**\n * @template T\n * @typedef {{ok:true, value:T}} Success\n */\n\n/**\n * @typedef {{ok:false, err:Error}} Failure\n */\n\n/**\n * Takes an (async) iterator that emits promise-returning functions,\n * invokes them in parallel and emits the results as they become available but\n * in the same order as the input\n *\n * @template T\n * @param {AsyncIterable<() => Promise>} source\n * @param {number} [size=1]\n * @returns {AsyncIterable}\n */\nasync function * parallelBatch (source, size = 1) {\n for await (const tasks of batch(source, size)) {\n /** @type {Promise|Failure>[]} */\n const things = tasks.map(\n /**\n * @param {() => Promise} p\n */\n p => {\n return p().then(value => ({ ok: true, value }), err => ({ ok: false, err }))\n })\n\n for (let i = 0; i < things.length; i++) {\n const result = await things[i]\n\n if (result.ok) {\n yield result.value\n } else {\n throw result.err\n }\n }\n }\n}\n\nmodule.exports = parallelBatch\n","'use strict'\n\n/**\n * @template T\n * @typedef {Object} Peek\n * @property {() => IteratorResult} peek\n */\n\n/**\n * @template T\n * @typedef {Object} AsyncPeek\n * @property {() => Promise>} peek\n */\n\n/**\n * @template T\n * @typedef {Object} Push\n * @property {(value:T) => void} push\n */\n\n/**\n * @template T\n * @typedef {Iterable & Peek & Push & Iterator} Peekable\n */\n\n/**\n * @template T\n * @typedef {AsyncIterable & AsyncPeek & Push & AsyncIterator} AsyncPeekable\n */\n\n/**\n * 
@template {Iterable | AsyncIterable} I\n * @param {I} iterable\n * @returns {I extends Iterable\n * ? Peekable\n * : I extends AsyncIterable\n * ? AsyncPeekable\n * : never\n * }\n */\nfunction peekableIterator (iterable) {\n // @ts-ignore\n const [iterator, symbol] = iterable[Symbol.asyncIterator]\n // @ts-ignore\n ? [iterable[Symbol.asyncIterator](), Symbol.asyncIterator]\n // @ts-ignore\n : [iterable[Symbol.iterator](), Symbol.iterator]\n\n /** @type {any[]} */\n const queue = []\n\n // @ts-ignore\n return {\n peek: () => {\n return iterator.next()\n },\n push: (value) => {\n queue.push(value)\n },\n next: () => {\n if (queue.length) {\n return {\n done: false,\n value: queue.shift()\n }\n }\n\n return iterator.next()\n },\n [symbol] () {\n return this\n }\n }\n}\n\nmodule.exports = peekableIterator\n","const rawPipe = (...fns) => {\n let res\n while (fns.length) {\n res = fns.shift()(res)\n }\n return res\n}\n\nconst isIterable = obj => obj && (\n typeof obj[Symbol.asyncIterator] === 'function' ||\n typeof obj[Symbol.iterator] === 'function' ||\n typeof obj.next === 'function' // Probably, right?\n)\n\nconst isDuplex = obj => obj && typeof obj.sink === 'function' && isIterable(obj.source)\n\nconst duplexPipelineFn = duplex => source => {\n duplex.sink(source) // TODO: error on sink side is unhandled rejection - this is the same as pull streams\n return duplex.source\n}\n\nconst pipe = (...fns) => {\n // Duplex at start: wrap in function and return duplex source\n if (isDuplex(fns[0])) {\n const duplex = fns[0]\n fns[0] = () => duplex.source\n // Iterable at start: wrap in function\n } else if (isIterable(fns[0])) {\n const source = fns[0]\n fns[0] = () => source\n }\n\n if (fns.length > 1) {\n // Duplex at end: use duplex sink\n if (isDuplex(fns[fns.length - 1])) {\n fns[fns.length - 1] = fns[fns.length - 1].sink\n }\n }\n\n if (fns.length > 2) {\n // Duplex in the middle, consume source with duplex sink and return duplex source\n for (let i = 1; i < fns.length - 1; i++) {\n if (isDuplex(fns[i])) {\n fns[i] = duplexPipelineFn(fns[i])\n }\n }\n }\n\n return rawPipe(...fns)\n}\n\nmodule.exports = pipe\nmodule.exports.pipe = pipe\nmodule.exports.rawPipe = rawPipe\nmodule.exports.isIterable = isIterable\nmodule.exports.isDuplex = isDuplex\n","'use strict'\n\n/**\n * Stop iteration after n items have been received.\n *\n * @template T\n * @param {AsyncIterable|Iterable} source\n * @param {number} limit\n * @returns {AsyncIterable}\n */\nconst take = async function * (source, limit) {\n let items = 0\n\n if (limit < 1) {\n return\n }\n\n for await (const entry of source) {\n yield entry\n\n items++\n\n if (items === limit) {\n return\n }\n }\n}\n\nmodule.exports = take\n","'use strict';\nconst isOptionObject = require('is-plain-obj');\n\nconst {hasOwnProperty} = Object.prototype;\nconst {propertyIsEnumerable} = Object;\nconst defineProperty = (object, name, value) => Object.defineProperty(object, name, {\n\tvalue,\n\twritable: true,\n\tenumerable: true,\n\tconfigurable: true\n});\n\nconst globalThis = this;\nconst defaultMergeOptions = {\n\tconcatArrays: false,\n\tignoreUndefined: false\n};\n\nconst getEnumerableOwnPropertyKeys = value => {\n\tconst keys = [];\n\n\tfor (const key in value) {\n\t\tif (hasOwnProperty.call(value, key)) {\n\t\t\tkeys.push(key);\n\t\t}\n\t}\n\n\t/* istanbul ignore else */\n\tif (Object.getOwnPropertySymbols) {\n\t\tconst symbols = Object.getOwnPropertySymbols(value);\n\n\t\tfor (const symbol of symbols) {\n\t\t\tif (propertyIsEnumerable.call(value, symbol)) 
{\n\t\t\t\tkeys.push(symbol);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn keys;\n};\n\nfunction clone(value) {\n\tif (Array.isArray(value)) {\n\t\treturn cloneArray(value);\n\t}\n\n\tif (isOptionObject(value)) {\n\t\treturn cloneOptionObject(value);\n\t}\n\n\treturn value;\n}\n\nfunction cloneArray(array) {\n\tconst result = array.slice(0, 0);\n\n\tgetEnumerableOwnPropertyKeys(array).forEach(key => {\n\t\tdefineProperty(result, key, clone(array[key]));\n\t});\n\n\treturn result;\n}\n\nfunction cloneOptionObject(object) {\n\tconst result = Object.getPrototypeOf(object) === null ? Object.create(null) : {};\n\n\tgetEnumerableOwnPropertyKeys(object).forEach(key => {\n\t\tdefineProperty(result, key, clone(object[key]));\n\t});\n\n\treturn result;\n}\n\n/**\n * @param {*} merged already cloned\n * @param {*} source something to merge\n * @param {string[]} keys keys to merge\n * @param {Object} config Config Object\n * @returns {*} cloned Object\n */\nconst mergeKeys = (merged, source, keys, config) => {\n\tkeys.forEach(key => {\n\t\tif (typeof source[key] === 'undefined' && config.ignoreUndefined) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Do not recurse into prototype chain of merged\n\t\tif (key in merged && merged[key] !== Object.getPrototypeOf(merged)) {\n\t\t\tdefineProperty(merged, key, merge(merged[key], source[key], config));\n\t\t} else {\n\t\t\tdefineProperty(merged, key, clone(source[key]));\n\t\t}\n\t});\n\n\treturn merged;\n};\n\n/**\n * @param {*} merged already cloned\n * @param {*} source something to merge\n * @param {Object} config Config Object\n * @returns {*} cloned Object\n *\n * see [Array.prototype.concat ( ...arguments )](http://www.ecma-international.org/ecma-262/6.0/#sec-array.prototype.concat)\n */\nconst concatArrays = (merged, source, config) => {\n\tlet result = merged.slice(0, 0);\n\tlet resultIndex = 0;\n\n\t[merged, source].forEach(array => {\n\t\tconst indices = [];\n\n\t\t// `result.concat(array)` with cloning\n\t\tfor (let k = 0; k < array.length; k++) {\n\t\t\tif (!hasOwnProperty.call(array, k)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tindices.push(String(k));\n\n\t\t\tif (array === merged) {\n\t\t\t\t// Already cloned\n\t\t\t\tdefineProperty(result, resultIndex++, array[k]);\n\t\t\t} else {\n\t\t\t\tdefineProperty(result, resultIndex++, clone(array[k]));\n\t\t\t}\n\t\t}\n\n\t\t// Merge non-index keys\n\t\tresult = mergeKeys(result, array, getEnumerableOwnPropertyKeys(array).filter(key => !indices.includes(key)), config);\n\t});\n\n\treturn result;\n};\n\n/**\n * @param {*} merged already cloned\n * @param {*} source something to merge\n * @param {Object} config Config Object\n * @returns {*} cloned Object\n */\nfunction merge(merged, source, config) {\n\tif (config.concatArrays && Array.isArray(merged) && Array.isArray(source)) {\n\t\treturn concatArrays(merged, source, config);\n\t}\n\n\tif (!isOptionObject(source) || !isOptionObject(merged)) {\n\t\treturn clone(source);\n\t}\n\n\treturn mergeKeys(merged, source, getEnumerableOwnPropertyKeys(source), config);\n}\n\nmodule.exports = function (...options) {\n\tconst config = merge(clone(defaultMergeOptions), (this !== globalThis && this) || {}, defaultMergeOptions);\n\tlet merged = {_: {}};\n\n\tfor (const option of options) {\n\t\tif (option === undefined) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (!isOptionObject(option)) {\n\t\t\tthrow new TypeError('`' + option + '` is not an Option Object');\n\t\t}\n\n\t\tmerged = merge(merged, {_: option}, config);\n\t}\n\n\treturn merged._;\n};\n","module.exports = minimatch\nminimatch.Minimatch 
= Minimatch\n\nvar path = { sep: '/' }\ntry {\n path = require('path')\n} catch (er) {}\n\nvar GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}\nvar expand = require('brace-expansion')\n\nvar plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' }\n}\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nvar qmark = '[^/]'\n\n// * => any number of characters\nvar star = qmark + '*?'\n\n// ** when dots are allowed. Anything goes, except .. and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nvar twoStarDot = '(?:(?!(?:\\\\\\/|^)(?:\\\\.{1,2})($|\\\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nvar twoStarNoDot = '(?:(?!(?:\\\\\\/|^)\\\\.).)*?'\n\n// characters that need to be escaped in RegExp.\nvar reSpecials = charSet('().*{}+?[]^$\\\\!')\n\n// \"abc\" -> { a:true, b:true, c:true }\nfunction charSet (s) {\n return s.split('').reduce(function (set, c) {\n set[c] = true\n return set\n }, {})\n}\n\n// normalizes slashes.\nvar slashSplit = /\\/+/\n\nminimatch.filter = filter\nfunction filter (pattern, options) {\n options = options || {}\n return function (p, i, list) {\n return minimatch(p, pattern, options)\n }\n}\n\nfunction ext (a, b) {\n a = a || {}\n b = b || {}\n var t = {}\n Object.keys(b).forEach(function (k) {\n t[k] = b[k]\n })\n Object.keys(a).forEach(function (k) {\n t[k] = a[k]\n })\n return t\n}\n\nminimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return minimatch\n\n var orig = minimatch\n\n var m = function minimatch (p, pattern, options) {\n return orig.minimatch(p, pattern, ext(def, options))\n }\n\n m.Minimatch = function Minimatch (pattern, options) {\n return new orig.Minimatch(pattern, ext(def, options))\n }\n\n return m\n}\n\nMinimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return Minimatch\n return minimatch.defaults(def).Minimatch\n}\n\nfunction minimatch (p, pattern, options) {\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false\n }\n\n // \"\" only matches \"\"\n if (pattern.trim() === '') return p === ''\n\n return new Minimatch(pattern, options).match(p)\n}\n\nfunction Minimatch (pattern, options) {\n if (!(this instanceof Minimatch)) {\n return new Minimatch(pattern, options)\n }\n\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n pattern = pattern.trim()\n\n // windows support: need to use /, not \\\n if (path.sep !== '/') {\n pattern = pattern.split(path.sep).join('/')\n }\n\n this.options = options\n this.set = []\n this.pattern = pattern\n this.regexp = null\n this.negate = false\n this.comment = false\n this.empty = false\n\n // make the set of regexps etc.\n this.make()\n}\n\nMinimatch.prototype.debug = function () {}\n\nMinimatch.prototype.make = make\nfunction make () {\n // don't do it more than once.\n if (this._made) return\n\n var pattern = this.pattern\n var options = this.options\n\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true\n return\n }\n if 
(!pattern) {\n this.empty = true\n return\n }\n\n // step 1: figure out negation, etc.\n this.parseNegate()\n\n // step 2: expand braces\n var set = this.globSet = this.braceExpand()\n\n if (options.debug) this.debug = console.error\n\n this.debug(this.pattern, set)\n\n // step 3: now we have a set, so turn each one into a series of path-portion\n // matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n set = this.globParts = set.map(function (s) {\n return s.split(slashSplit)\n })\n\n this.debug(this.pattern, set)\n\n // glob --> regexps\n set = set.map(function (s, si, set) {\n return s.map(this.parse, this)\n }, this)\n\n this.debug(this.pattern, set)\n\n // filter out everything that didn't compile properly.\n set = set.filter(function (s) {\n return s.indexOf(false) === -1\n })\n\n this.debug(this.pattern, set)\n\n this.set = set\n}\n\nMinimatch.prototype.parseNegate = parseNegate\nfunction parseNegate () {\n var pattern = this.pattern\n var negate = false\n var options = this.options\n var negateOffset = 0\n\n if (options.nonegate) return\n\n for (var i = 0, l = pattern.length\n ; i < l && pattern.charAt(i) === '!'\n ; i++) {\n negate = !negate\n negateOffset++\n }\n\n if (negateOffset) this.pattern = pattern.substr(negateOffset)\n this.negate = negate\n}\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nminimatch.braceExpand = function (pattern, options) {\n return braceExpand(pattern, options)\n}\n\nMinimatch.prototype.braceExpand = braceExpand\n\nfunction braceExpand (pattern, options) {\n if (!options) {\n if (this instanceof Minimatch) {\n options = this.options\n } else {\n options = {}\n }\n }\n\n pattern = typeof pattern === 'undefined'\n ? this.pattern : pattern\n\n if (typeof pattern === 'undefined') {\n throw new TypeError('undefined pattern')\n }\n\n if (options.nobrace ||\n !pattern.match(/\\{.*\\}/)) {\n // shortcut. no need to expand.\n return [pattern]\n }\n\n return expand(pattern)\n}\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nMinimatch.prototype.parse = parse\nvar SUBPARSE = {}\nfunction parse (pattern, isSub) {\n if (pattern.length > 1024 * 64) {\n throw new TypeError('pattern is too long')\n }\n\n var options = this.options\n\n // shortcuts\n if (!options.noglobstar && pattern === '**') return GLOBSTAR\n if (pattern === '') return ''\n\n var re = ''\n var hasMagic = !!options.nocase\n var escaping = false\n // ? => one single character\n var patternListStack = []\n var negativeLists = []\n var stateChar\n var inClass = false\n var reClassStart = -1\n var classStart = -1\n // . and .. 
never match anything that doesn't start with .,\n // even when options.dot is set.\n var patternStart = pattern.charAt(0) === '.' ? '' // anything\n // not (start or / followed by . or .. followed by / or end)\n : options.dot ? '(?!(?:^|\\\\\\/)\\\\.{1,2}(?:$|\\\\\\/))'\n : '(?!\\\\.)'\n var self = this\n\n function clearStateChar () {\n if (stateChar) {\n // we had some state-tracking character\n // that wasn't consumed by this pass.\n switch (stateChar) {\n case '*':\n re += star\n hasMagic = true\n break\n case '?':\n re += qmark\n hasMagic = true\n break\n default:\n re += '\\\\' + stateChar\n break\n }\n self.debug('clearStateChar %j %j', stateChar, re)\n stateChar = false\n }\n }\n\n for (var i = 0, len = pattern.length, c\n ; (i < len) && (c = pattern.charAt(i))\n ; i++) {\n this.debug('%s\\t%s %s %j', pattern, i, re, c)\n\n // skip over any that are escaped.\n if (escaping && reSpecials[c]) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n switch (c) {\n case '/':\n // completely not allowed, even escaped.\n // Should already be path-split by now.\n return false\n\n case '\\\\':\n clearStateChar()\n escaping = true\n continue\n\n // the various stateChar values\n // for the \"extglob\" stuff.\n case '?':\n case '*':\n case '+':\n case '@':\n case '!':\n this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c)\n\n // all of those are literals inside a class, except that\n // the glob [!a] means [^a] in regexp\n if (inClass) {\n this.debug(' in class')\n if (c === '!' && i === classStart + 1) c = '^'\n re += c\n continue\n }\n\n // if we already have a stateChar, then it means\n // that there was something like ** or +? in there.\n // Handle the stateChar, then proceed with this one.\n self.debug('call clearStateChar %j', stateChar)\n clearStateChar()\n stateChar = c\n // if extglob is disabled, then +(asdf|foo) isn't a thing.\n // just clear the statechar *now*, rather than even diving into\n // the patternList stuff.\n if (options.noext) clearStateChar()\n continue\n\n case '(':\n if (inClass) {\n re += '('\n continue\n }\n\n if (!stateChar) {\n re += '\\\\('\n continue\n }\n\n patternListStack.push({\n type: stateChar,\n start: i - 1,\n reStart: re.length,\n open: plTypes[stateChar].open,\n close: plTypes[stateChar].close\n })\n // negation is (?:(?!js)[^/]*)\n re += stateChar === '!' ? '(?:(?!(?:' : '(?:'\n this.debug('plType %j %j', stateChar, re)\n stateChar = false\n continue\n\n case ')':\n if (inClass || !patternListStack.length) {\n re += '\\\\)'\n continue\n }\n\n clearStateChar()\n hasMagic = true\n var pl = patternListStack.pop()\n // negation is (?:(?!js)[^/]*)\n // The others are (?:)