From 589a84166dca1229cfaeb86232a817d84dd725d7 Mon Sep 17 00:00:00 2001 From: Matt Witherspoon <32485495+spoonincode@users.noreply.github.com> Date: Sat, 29 Jun 2024 16:32:21 -0400 Subject: [PATCH] fix zip streaming; upgrade packages & use node20 --- action.yml | 2 +- dist/index.mjs | 96969 ++++++++++++++++++++++---------------------- dist/licenses.txt | 432 +- main.mjs | 43 +- package.json | 12 +- 5 files changed, 47587 insertions(+), 49871 deletions(-) diff --git a/action.yml b/action.yml index ea551ac..281aff8 100644 --- a/action.yml +++ b/action.yml @@ -33,5 +33,5 @@ outputs: downloaded-file: description: 'The name of the downloaded file, or empty string if failure' runs: - using: 'node16' + using: 'node20' main: 'dist/index.mjs' diff --git a/dist/index.mjs b/dist/index.mjs index 64ee107..00bfe72 100644 --- a/dist/index.mjs +++ b/dist/index.mjs @@ -1,7 +1,7 @@ import { createRequire as __WEBPACK_EXTERNAL_createRequire } from "module"; /******/ var __webpack_modules__ = ({ -/***/ 5241: +/***/ 9970: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -27,7 +27,7 @@ var __importStar = (this && this.__importStar) || function (mod) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.issue = exports.issueCommand = void 0; const os = __importStar(__nccwpck_require__(2037)); -const utils_1 = __nccwpck_require__(6321); +const utils_1 = __nccwpck_require__(3030); /** * Commands * @@ -99,7 +99,7 @@ function escapeProperty(s) { /***/ }), -/***/ 2186: +/***/ 2249: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -133,12 +133,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(5241); -const file_command_1 = __nccwpck_require__(717); -const utils_1 = __nccwpck_require__(6321); +const command_1 = __nccwpck_require__(9970); +const file_command_1 = __nccwpck_require__(621); +const utils_1 = __nccwpck_require__(3030); const os = __importStar(__nccwpck_require__(2037)); const path = __importStar(__nccwpck_require__(1017)); -const oidc_utils_1 = __nccwpck_require__(8041); +const oidc_utils_1 = __nccwpck_require__(3523); /** * The code to exit an action */ @@ -423,17 +423,17 @@ exports.getIDToken = getIDToken; /** * Summary exports */ -var summary_1 = __nccwpck_require__(1327); +var summary_1 = __nccwpck_require__(9286); Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); /** * @deprecated use core.summary */ -var summary_2 = __nccwpck_require__(1327); +var summary_2 = __nccwpck_require__(9286); Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); /** * Path exports */ -var path_utils_1 = __nccwpck_require__(2981); +var path_utils_1 = __nccwpck_require__(8579); Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); Object.defineProperty(exports, 
"toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); @@ -441,7 +441,7 @@ Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: funct /***/ }), -/***/ 717: +/***/ 621: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -471,8 +471,8 @@ exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(7147)); const os = __importStar(__nccwpck_require__(2037)); -const uuid_1 = __nccwpck_require__(5840); -const utils_1 = __nccwpck_require__(6321); +const uuid_1 = __nccwpck_require__(5345); +const utils_1 = __nccwpck_require__(3030); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { @@ -505,7 +505,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage; /***/ }), -/***/ 8041: +/***/ 3523: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -520,9 +520,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(6255); -const auth_1 = __nccwpck_require__(5526); -const core_1 = __nccwpck_require__(2186); +const http_client_1 = __nccwpck_require__(4470); +const auth_1 = __nccwpck_require__(1425); +const core_1 = __nccwpck_require__(2249); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { const requestOptions = { @@ -588,7 +588,7 @@ exports.OidcClient = OidcClient; /***/ }), -/***/ 2981: +/***/ 8579: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -652,7 +652,7 @@ exports.toPlatformPath = toPlatformPath; /***/ }), -/***/ 1327: +/***/ 9286: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -941,7 +941,7 @@ exports.summary = _summary; /***/ }), -/***/ 6321: +/***/ 3030: /***/ ((__unused_webpack_module, exports) => { @@ -987,7 +987,7 @@ exports.toCommandProperties = toCommandProperties; /***/ }), -/***/ 4087: +/***/ 5014: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { @@ -1003,8 +1003,8 @@ class Context { var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } else { const path = process.env.GITHUB_EVENT_PATH; @@ -1022,7 +1022,8 @@ class Context { this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? 
_c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; @@ -1047,13 +1048,17 @@ exports.Context = Context; /***/ }), -/***/ 5438: +/***/ 7962: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1066,14 +1071,14 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOctokit = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const utils_1 = __nccwpck_require__(3030); +const Context = __importStar(__nccwpck_require__(5014)); +const utils_1 = __nccwpck_require__(9740); exports.context = new Context.Context(); /** * Returns a hydrated octokit ready to use for GitHub Actions @@ -1083,20 +1088,24 @@ exports.context = new Context.Context(); */ function getOctokit(token, options, ...additionalPlugins) { const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); - return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options)); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); } exports.getOctokit = getOctokit; //# sourceMappingURL=github.js.map /***/ }), -/***/ 7914: +/***/ 9295: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1109,13 +1118,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(6255)); +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(4470)); +const undici_1 = __nccwpck_require__(9027); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); @@ -1131,6 +1150,19 @@ function getProxyAgent(destinationUrl) { return hc.getAgent(destinationUrl); } exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; function getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } @@ -1139,13 +1171,17 @@ exports.getApiBaseUrl = getApiBaseUrl; /***/ }), -/***/ 3030: +/***/ 9740: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1158,24 +1194,25 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const Utils = __importStar(__nccwpck_require__(7914)); +const Context = __importStar(__nccwpck_require__(5014)); +const Utils = __importStar(__nccwpck_require__(9295)); // octokit + plugins -const core_1 = __nccwpck_require__(6762); -const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); -const plugin_paginate_rest_1 = __nccwpck_require__(4193); +const core_1 = __nccwpck_require__(5175); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(2317); +const plugin_paginate_rest_1 = __nccwpck_require__(1768); exports.context = new Context.Context(); const baseUrl = Utils.getApiBaseUrl(); exports.defaults = { baseUrl, request: { - agent: Utils.getProxyAgent(baseUrl) + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) } }; exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); @@ -1199,7 +1236,7 @@ exports.getOctokitOptions = getOctokitOptions; /***/ }), -/***/ 5526: +/***/ 1425: /***/ (function(__unused_webpack_module, exports) { @@ -1286,7 +1323,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand /***/ }), -/***/ 6255: +/***/ 4470: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -1327,9 +1364,9 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; const http = __importStar(__nccwpck_require__(3685)); const https = __importStar(__nccwpck_require__(5687)); -const pm = __importStar(__nccwpck_require__(9835)); -const tunnel = __importStar(__nccwpck_require__(4294)); -const undici_1 = __nccwpck_require__(1773); +const pm = __importStar(__nccwpck_require__(3102)); +const tunnel = __importStar(__nccwpck_require__(3449)); +const undici_1 = __nccwpck_require__(9027); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -1793,7 +1830,7 @@ class HttpClient { if (this._keepAlive && useProxy) { agent = this._proxyAgent; } - if (this._keepAlive && !useProxy) { + if (!useProxy) { agent = this._agent; } // if agent is already assigned use that agent. @@ -1825,16 +1862,12 @@ class HttpClient { agent = tunnelAgent(agentOptions); this._proxyAgent = agent; } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { + // if tunneling agent isn't assigned create a new agent + if (!agent) { const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options @@ -1948,7 +1981,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa /***/ }), -/***/ 9835: +/***/ 3102: /***/ ((__unused_webpack_module, exports) => { @@ -2036,6983 +2069,4135 @@ function isLoopbackAddress(host) { /***/ }), -/***/ 2856: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -const WritableStream = (__nccwpck_require__(4492).Writable) -const inherits = (__nccwpck_require__(7261).inherits) - -const StreamSearch = __nccwpck_require__(8534) - -const PartStream = __nccwpck_require__(3500) -const HeaderParser = __nccwpck_require__(333) - -const DASH = 45 -const B_ONEDASH = Buffer.from('-') -const B_CRLF = Buffer.from('\r\n') -const EMPTY_FN = function () {} - -function Dicer (cfg) { - if (!(this instanceof Dicer)) { return new Dicer(cfg) } - WritableStream.call(this, cfg) - - if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } +/***/ 9840: +/***/ ((module) => { - if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } - this._headerFirst = cfg.headerFirst +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - this._dashes = 0 - this._parts = 0 - this._finished = false - this._realFinish = false - this._isPreamble = true - this._justMatched = false - this._firstWrite = true - this._inHeader = true - this._part = undefined - this._cb = undefined - this._ignoreData = false - this._partOpts = { highWaterMark: cfg.partHwm } - this._pause = false +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); - const self = this - this._hparser = new HeaderParser(cfg) - this._hparser.on('header', function (header) { - self._inHeader = false - self._part.emit('header', header) - }) +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; } -inherits(Dicer, WritableStream) -Dicer.prototype.emit = function (ev) { - if (ev === 'finish' && !this._realFinish) { - if (!this._finished) { - const self = this - process.nextTick(function () { - self.emit('error', new Error('Unexpected end of multipart data')) - if (self._part && !self._ignoreData) { - const type = (self._isPreamble ? 'Preamble' : 'Part') - self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) - self._part.push(null) - process.nextTick(function () { - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - return - } - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - } - } else { WritableStream.prototype.emit.apply(this, arguments) } +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; } -Dicer.prototype._write = function (data, encoding, cb) { - // ignore unexpected data (e.g. extra trailer data after finished) - if (!this._hparser && !this._bparser) { return cb() } +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} - if (this._headerFirst && this._isPreamble) { - if (!this._part) { - this._part = new PartStream(this._partOpts) - if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } - } - const r = this._hparser.push(data) - if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } - - // allows for "easier" testing - if (this._firstWrite) { - this._bparser.push(B_CRLF) - this._firstWrite = false + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); - this._bparser.push(data) - if (this._pause) { this._cb = cb } else { cb() } -} +/***/ }), -Dicer.prototype.reset = function () { - this._part = undefined - this._bparser = undefined - this._hparser = undefined -} +/***/ 5175: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Dicer.prototype.setBoundary = function (boundary) { - const self = this - this._bparser = new StreamSearch('\r\n--' + boundary) - this._bparser.on('info', function (isMatch, data, start, end) { - self._oninfo(isMatch, data, start, end) - }) -} -Dicer.prototype._ignore = function () { - if (this._part && !this._ignoreData) { - this._ignoreData = true - this._part.on('error', EMPTY_FN) - // we must perform some kind of read on the stream even though we are - // ignoring the data, otherwise node's Readable stream will not emit 'end' - // after pushing null to the stream - this._part.resume() +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = 
Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } -} - -Dicer.prototype._oninfo = function (isMatch, data, start, end) { - let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - if (!this._part && this._justMatched && data) { - while (this._dashes < 2 && (start + i) < end) { - if (data[start + i] === DASH) { - ++i - ++this._dashes - } else { - if (this._dashes) { buf = B_ONEDASH } - this._dashes = 0 - break +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(599); +var import_before_after_hook = __nccwpck_require__(5596); +var import_request = __nccwpck_require__(3612); +var import_graphql = __nccwpck_require__(3583); +var import_auth_token = __nccwpck_require__(9840); + +// pkg/dist-src/version.js +var VERSION = "5.2.0"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); } - } - if (this._dashes === 2) { - if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } - this.reset() - this._finished = true - // no more parts will be added - if (self._parts === 0) { - self._realFinish = true - self.emit('finish') - self._realFinish = false + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); } - } - if (this._dashes) { return } + }; + return NewOctokit; } - if (this._justMatched) { this._justMatched = false } - if (!this._part) { - this._part = new PartStream(this._partOpts) - this._part._read = function (n) { - self._unpause() + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; } - if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } - if (!this._isPreamble) { this._inHeader = true } - } - if (data && start < end && !this._ignoreData) { - if (this._isPreamble || !this._inHeader) { - if (buf) { shouldWriteMore = this._part.push(buf) } - shouldWriteMore = this._part.push(data.slice(start, end)) - if (!shouldWriteMore) { this._pause = true } - } else if (!this._isPreamble && this._inHeader) { - if (buf) { this._hparser.push(buf) } - r = this._hparser.push(data.slice(start, end)) - if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; } - } - if (isMatch) { - this._hparser.reset() - if (this._isPreamble) { this._isPreamble = false } else { - if (start !== end) { - ++this._parts - this._part.on('end', function () { - if (--self._parts === 0) { - if (self._finished) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } else { - self._unpause() - } - } - }) + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); } - this._part.push(null) - this._part = undefined - this._ignoreData = false - this._justMatched = true - this._dashes = 0 - } -} - -Dicer.prototype._unpause = function () { - if (!this._pause) { return } - - this._pause = false - if (this._cb) { - const cb = this._cb - this._cb = undefined - cb() } -} - -module.exports = Dicer +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), -/***/ 333: +/***/ 4657: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -const EventEmitter = (__nccwpck_require__(5673).EventEmitter) -const inherits = (__nccwpck_require__(7261).inherits) -const getLimit = __nccwpck_require__(9692) +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); -const StreamSearch = __nccwpck_require__(8534) +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(599); -const B_DCRLF = Buffer.from('\r\n\r\n') -const RE_CRLF = /\r\n/g -const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex +// pkg/dist-src/version.js +var VERSION = "9.0.5"; -function HeaderParser (cfg) { - EventEmitter.call(this) +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; - cfg = cfg || {} - const self = this - this.nread = 0 - this.maxed = false - this.npairs = 0 - this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) - this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) - this.buffer = '' - this.header = {} - this.finished = false - this.ss = new StreamSearch(B_DCRLF) - this.ss.on('info', function (isMatch, data, start, end) { - if (data && !self.maxed) { - if (self.nread + end - start >= self.maxHeaderSize) { - end = self.maxHeaderSize - self.nread + start - self.nread = self.maxHeaderSize - self.maxed = true - } else { self.nread += (end - start) } +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} - self.buffer += data.toString('binary', start, 
end) - } - if (isMatch) { self._finish() } - }) +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); } -inherits(HeaderParser, EventEmitter) -HeaderParser.prototype.push = function (data) { - const r = this.ss.push(data) - if (this.finished) { return r } +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; } -HeaderParser.prototype.reset = function () { - this.finished = false - this.buffer = '' - this.header = {} - this.ss.reset() +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; } -HeaderParser.prototype._finish = function () { - if (this.buffer) { this._parseHeader() } - this.ss.matches = this.ss.maxMatches - const header = this.header - this.header = {} - this.buffer = '' - this.finished = true - this.nread = this.npairs = 0 - this.maxed = false - this.emit('header', header) +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; } -HeaderParser.prototype._parseHeader = function () { - if (this.npairs === this.maxHeaderPairs) { return } +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} - const lines = this.buffer.split(RE_CRLF) - const len = lines.length - let m, h +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (lines[i].length === 0) { continue } - if (lines[i][0] === '\t' || lines[i][0] === ' ') { - // folded header content - // RFC2822 says to just remove the CRLF and not the whitespace following - // it, so we follow the RFC and include the leading whitespace ... - if (h) { - this.header[h][this.header[h].length - 1] += lines[i] - continue - } +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; } + } + return result; +} - const posColon = lines[i].indexOf(':') - if ( - posColon === -1 || - posColon === 0 - ) { - return +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } - m = RE_HDR.exec(lines[i]) - h = m[1].toLowerCase() - this.header[h] = this.header[h] || [] - this.header[h].push((m[2] || '')) - if (++this.npairs === this.maxHeaderPairs) { break } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; } } - -module.exports = HeaderParser - - -/***/ }), - -/***/ 3500: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -const inherits = (__nccwpck_require__(7261).inherits) -const ReadableStream = (__nccwpck_require__(4492).Readable) - -function PartStream (opts) { - ReadableStream.call(this, opts) +function isDefined(value) { + return value !== void 0 && value !== null; } -inherits(PartStream, ReadableStream) - -PartStream.prototype._read = function (n) {} - -module.exports = PartStream - - -/***/ }), - -/***/ 8534: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -/** - * Copyright Brian White. All rights reserved. 
- * - * @see https://github.com/mscdex/streamsearch - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation - * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool - */ -const EventEmitter = (__nccwpck_require__(5673).EventEmitter) -const inherits = (__nccwpck_require__(7261).inherits) - -function SBMH (needle) { - if (typeof needle === 'string') { - needle = Buffer.from(needle) +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } } - - if (!Buffer.isBuffer(needle)) { - throw new TypeError('The needle has to be a String or a Buffer.') + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); } +} - const needleLength = needle.length - - if (needleLength === 0) { - throw new Error('The needle cannot be an empty String/Buffer.') +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; } - - if (needleLength > 256) { - throw new Error('The needle cannot have a length bigger than 256.') + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } } - - this.maxMatches = Infinity - this.matches = 0 - - this._occ = new Array(256) - .fill(needleLength) // Initialize occurrence table. - this._lookbehind_size = 0 - this._needle = needle - this._bufpos = 0 - - this._lookbehind = Buffer.alloc(needleLength) - - // Populate occurrence table with analysis of the needle, - // ignoring last letter. - for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var - this._occ[needle[i]] = needleLength - 1 - i + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? 
{ request: options.request } : null + ); } -inherits(SBMH, EventEmitter) -SBMH.prototype.reset = function () { - this._lookbehind_size = 0 - this.matches = 0 - this._bufpos = 0 +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); } -SBMH.prototype.push = function (chunk, pos) { - if (!Buffer.isBuffer(chunk)) { - chunk = Buffer.from(chunk, 'binary') - } - const chlen = chunk.length - this._bufpos = pos || 0 - let r - while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } - return r +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); } -SBMH.prototype._sbmh_feed = function (data) { - const len = data.length - const needle = this._needle - const needleLength = needle.length - const lastNeedleChar = needle[needleLength - 1] +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); - // Positive: points to a position in `data` - // pos == 3 points to data[3] - // Negative: points to a position in the lookbehind buffer - // pos == -2 points to lookbehind[lookbehind_size - 2] - let pos = -this._lookbehind_size - let ch - if (pos < 0) { - // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool - // search with character lookup code that considers both the - // lookbehind buffer and the current round's haystack data. - // - // Loop until - // there is a match. - // or until - // we've moved past the position that requires the - // lookbehind buffer. In this case we switch to the - // optimized loop. - // or until - // the character to look at lies outside the haystack. - while (pos < 0 && pos <= len - needleLength) { - ch = this._sbmh_lookup_char(data, pos + needleLength - 1) +/***/ }), - if ( - ch === lastNeedleChar && - this._sbmh_memcmp(data, pos, needleLength - 1) - ) { - this._lookbehind_size = 0 - ++this.matches - this.emit('info', true) +/***/ 3583: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return (this._bufpos = pos + needleLength) - } - pos += this._occ[ch] - } - // No match. +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - if (pos < 0) { - // There's too few data for Boyer-Moore-Horspool to run, - // so let's use a different algorithm to skip as much as - // we can. 
- // Forward pos until - // the trailing part of lookbehind + data - // looks like the beginning of the needle - // or until - // pos == 0 - while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } - } +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(3612); +var import_universal_user_agent = __nccwpck_require__(599); - if (pos >= 0) { - // Discard lookbehind buffer. - this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) - this._lookbehind_size = 0 - } else { - // Cut off part of the lookbehind buffer that has - // been processed and append the entire haystack - // into it. - const bytesToCutOff = this._lookbehind_size + pos - if (bytesToCutOff > 0) { - // The cut off data is guaranteed not to contain the needle. - this.emit('info', false, this._lookbehind, 0, bytesToCutOff) - } +// pkg/dist-src/version.js +var VERSION = "7.1.0"; - this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, - this._lookbehind_size - bytesToCutOff) - this._lookbehind_size -= bytesToCutOff +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(3612); - data.copy(this._lookbehind, this._lookbehind_size) - this._lookbehind_size += len +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(3612); - this._bufpos = len - return len +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } } +}; - pos += (pos >= 0) * this._bufpos - - // Lookbehind buffer is now empty. We only need to check if the - // needle is in the haystack. - if (data.indexOf(needle, pos) !== -1) { - pos = data.indexOf(needle, pos) - ++this.matches - if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } - - return (this._bufpos = pos + needleLength) - } else { - pos = len - needleLength - } - - // There was no match. If there's trailing haystack data that we cannot - // match yet using the Boyer-Moore-Horspool algorithm (because the trailing - // data is less than the needle size) then match using a modified - // algorithm that starts matching from the beginning instead of the end. - // Whatever trailing data is left after running this algorithm is added to - // the lookbehind buffer. 
- while ( - pos < len && - ( - data[pos] !== needle[0] || - ( - (Buffer.compare( - data.subarray(pos, pos + len - pos), - needle.subarray(0, len - pos) - ) !== 0) - ) - ) - ) { - ++pos +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } } - if (pos < len) { - data.copy(this._lookbehind, 0, pos, pos + (len - pos)) - this._lookbehind_size = len - pos + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } - - // Everything until pos is guaranteed not to contain needle data. - if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } - - this._bufpos = len - return len + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); } -SBMH.prototype._sbmh_lookup_char = function (data, pos) { - return (pos < 0) - ? 
this._lookbehind[this._lookbehind_size + pos] - : data[pos] +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); } -SBMH.prototype._sbmh_memcmp = function (data, pos, len) { - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } - } - return true +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); } - -module.exports = SBMH +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), -/***/ 3438: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - +/***/ 1768: +/***/ ((module) => { -const WritableStream = (__nccwpck_require__(4492).Writable) -const { inherits } = __nccwpck_require__(7261) -const Dicer = __nccwpck_require__(2856) +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -const MultipartParser = __nccwpck_require__(415) -const UrlencodedParser = __nccwpck_require__(6780) -const parseParams = __nccwpck_require__(4426) +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); -function Busboy (opts) { - if (!(this instanceof Busboy)) { return new Busboy(opts) } +// pkg/dist-src/version.js +var VERSION = "9.2.1"; - if (typeof opts !== 'object') { - throw new TypeError('Busboy expected an options-Object.') +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; } - if (typeof opts.headers !== 'object') { - throw new TypeError('Busboy expected an options-Object with headers-attribute.') + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete 
response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; } - if (typeof opts.headers['content-type'] !== 'string') { - throw new TypeError('Missing Content-Type-header.') + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; } + response.data.total_count = totalCount; + return response; +} - const { - headers, - ...streamOptions - } = opts +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} - this.opts = { - autoDestroy: false, - ...streamOptions +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; } - WritableStream.call(this, this.opts) - - this._done = false - this._parser = this.getParserByHeaders(headers) - this._finished = false + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); } -inherits(Busboy, WritableStream) - -Busboy.prototype.emit = function (ev) { - if (ev === 'finish') { - if (!this._done) { - this._parser?.end() - return - } else if (this._finished) { - return +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; } - this._finished = true - } - WritableStream.prototype.emit.apply(this, arguments) + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); } -Busboy.prototype.getParserByHeaders = function (headers) { - const parsed = parseParams(headers['content-type']) +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); - const cfg = { - defCharset: this.opts.defCharset, - fileHwm: this.opts.fileHwm, - headers, - highWaterMark: this.opts.highWaterMark, - isPartAFile: this.opts.isPartAFile, - limits: this.opts.limits, - parsedConType: parsed, - preservePath: this.opts.preservePath - } +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/organization-roles/{role_id}/teams", + "GET /orgs/{org}/organization-roles/{role_id}/users", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET 
/orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET 
/repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET 
/users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; - if (MultipartParser.detect.test(parsed[0])) { - return new MultipartParser(this, cfg) - } - if (UrlencodedParser.detect.test(parsed[0])) { - return new UrlencodedParser(this, cfg) +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; } - throw new Error('Unsupported Content-Type.') -} - -Busboy.prototype._write = function (chunk, encoding, cb) { - this._parser.write(chunk, cb) } -module.exports = Busboy -module.exports["default"] = Busboy -module.exports.Busboy = Busboy - -module.exports.Dicer = Dicer - - -/***/ }), - -/***/ 415: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -// TODO: -// * support 1 nested multipart level -// (see second multipart example here: -// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) -// * support limits.fieldNameSize -// -- this will require modifications to utils.parseParams - -const { Readable } = __nccwpck_require__(4492) -const { inherits } = __nccwpck_require__(7261) - -const Dicer = __nccwpck_require__(2856) - -const parseParams = __nccwpck_require__(4426) -const decodeText = __nccwpck_require__(9136) -const basename = __nccwpck_require__(496) -const getLimit = __nccwpck_require__(9692) - -const RE_BOUNDARY = /^boundary$/i -const RE_FIELD = /^form-data$/i -const RE_CHARSET = /^charset$/i -const RE_FILENAME = /^filename$/i -const RE_NAME = /^name$/i - -Multipart.detect = /^multipart\/form-data/i -function Multipart (boy, cfg) { - let i - let len - const self = this - let boundary - const limits = cfg.limits - const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) - const parsedConType = cfg.parsedConType || [] - const defCharset = cfg.defCharset || 'utf8' - const preservePath = cfg.preservePath - const fileOpts = { highWaterMark: cfg.fileHwm } - - for (i = 0, len = parsedConType.length; i < len; ++i) { - if (Array.isArray(parsedConType[i]) && - RE_BOUNDARY.test(parsedConType[i][0])) { - boundary = parsedConType[i][1] - break - } - } - - function checkFinished () { - if (nends === 0 && finished && !boy._done) { - finished = false - self.end() - } - } - - if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } - - const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) - const filesLimit = getLimit(limits, 'files', Infinity) - const fieldsLimit = getLimit(limits, 'fields', Infinity) - const partsLimit = getLimit(limits, 'parts', Infinity) - const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) - const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) - - let nfiles = 0 - let nfields = 0 - let nends = 0 - let curFile - let curField - let finished = false - - this._needDrain = false - this._pause = false 
- this._cb = undefined - this._nparts = 0 - this._boy = boy - - const parserCfg = { - boundary, - maxHeaderPairs: headerPairsLimit, - maxHeaderSize: headerSizeLimit, - partHwm: fileOpts.highWaterMark, - highWaterMark: cfg.highWaterMark - } - - this.parser = new Dicer(parserCfg) - this.parser.on('drain', function () { - self._needDrain = false - if (self._cb && !self._pause) { - const cb = self._cb - self._cb = undefined - cb() - } - }).on('part', function onPart (part) { - if (++self._nparts > partsLimit) { - self.parser.removeListener('part', onPart) - self.parser.on('part', skipPart) - boy.hitPartsLimit = true - boy.emit('partsLimit') - return skipPart(part) - } - - // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let - // us emit 'end' early since we know the part has ended if we are already - // seeing the next part - if (curField) { - const field = curField - field.emit('end') - field.removeAllListeners('end') - } - - part.on('header', function (header) { - let contype - let fieldname - let parsed - let charset - let encoding - let filename - let nsize = 0 - - if (header['content-type']) { - parsed = parseParams(header['content-type'][0]) - if (parsed[0]) { - contype = parsed[0].toLowerCase() - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_CHARSET.test(parsed[i][0])) { - charset = parsed[i][1].toLowerCase() - break - } - } - } - } - - if (contype === undefined) { contype = 'text/plain' } - if (charset === undefined) { charset = defCharset } - - if (header['content-disposition']) { - parsed = parseParams(header['content-disposition'][0]) - if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_NAME.test(parsed[i][0])) { - fieldname = parsed[i][1] - } else if (RE_FILENAME.test(parsed[i][0])) { - filename = parsed[i][1] - if (!preservePath) { filename = basename(filename) } - } - } - } else { return skipPart(part) } - - if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } - - let onData, - onEnd - - if (isPartAFile(fieldname, contype, filename)) { - // file/binary field - if (nfiles === filesLimit) { - if (!boy.hitFilesLimit) { - boy.hitFilesLimit = true - boy.emit('filesLimit') - } - return skipPart(part) - } - - ++nfiles - - if (!boy._events.file) { - self.parser._ignore() - return - } - - ++nends - const file = new FileStream(fileOpts) - curFile = file - file.on('end', function () { - --nends - self._pause = false - checkFinished() - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - }) - file._read = function (n) { - if (!self._pause) { return } - self._pause = false - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - } - boy.emit('file', fieldname, file, filename, encoding, contype) - - onData = function (data) { - if ((nsize += data.length) > fileSizeLimit) { - const extralen = fileSizeLimit - nsize + data.length - if (extralen > 0) { file.push(data.slice(0, extralen)) } - file.truncated = true - file.bytesRead = fileSizeLimit - part.removeAllListeners('data') - file.emit('limit') - return - } else if (!file.push(data)) { self._pause = true } - - file.bytesRead = nsize - } - - onEnd = function () { - curFile = undefined - file.push(null) - } - } else { - // non-file field - if (nfields === fieldsLimit) { - if (!boy.hitFieldsLimit) { - boy.hitFieldsLimit = true - boy.emit('fieldsLimit') - } - return skipPart(part) - } - 
- ++nfields - ++nends - let buffer = '' - let truncated = false - curField = part - - onData = function (data) { - if ((nsize += data.length) > fieldSizeLimit) { - const extralen = (fieldSizeLimit - (nsize - data.length)) - buffer += data.toString('binary', 0, extralen) - truncated = true - part.removeAllListeners('data') - } else { buffer += data.toString('binary') } - } - - onEnd = function () { - curField = undefined - if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } - boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) - --nends - checkFinished() - } - } - - /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become - broken. Streams2/streams3 is a huge black box of confusion, but - somehow overriding the sync state seems to fix things again (and still - seems to work for previous node versions). - */ - part._readableState.sync = false - - part.on('data', onData) - part.on('end', onEnd) - }).on('error', function (err) { - if (curFile) { curFile.emit('error', err) } - }) - }).on('error', function (err) { - boy.emit('error', err) - }).on('finish', function () { - finished = true - checkFinished() - }) -} - -Multipart.prototype.write = function (chunk, cb) { - const r = this.parser.write(chunk) - if (r && !this._pause) { - cb() - } else { - this._needDrain = !r - this._cb = cb - } -} - -Multipart.prototype.end = function () { - const self = this - - if (self.parser.writable) { - self.parser.end() - } else if (!self._boy._done) { - process.nextTick(function () { - self._boy._done = true - self._boy.emit('finish') - }) - } -} - -function skipPart (part) { - part.resume() -} - -function FileStream (opts) { - Readable.call(this, opts) - - this.bytesRead = 0 - - this.truncated = false -} - -inherits(FileStream, Readable) - -FileStream.prototype._read = function (n) {} - -module.exports = Multipart - - -/***/ }), - -/***/ 6780: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -const Decoder = __nccwpck_require__(9730) -const decodeText = __nccwpck_require__(9136) -const getLimit = __nccwpck_require__(9692) - -const RE_CHARSET = /^charset$/i - -UrlEncoded.detect = /^application\/x-www-form-urlencoded/i -function UrlEncoded (boy, cfg) { - const limits = cfg.limits - const parsedConType = cfg.parsedConType - this.boy = boy - - this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) - this.fieldsLimit = getLimit(limits, 'fields', Infinity) - - let charset - for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var - if (Array.isArray(parsedConType[i]) && - RE_CHARSET.test(parsedConType[i][0])) { - charset = parsedConType[i][1].toLowerCase() - break - } - } - - if (charset === undefined) { charset = cfg.defCharset || 'utf8' } - - this.decoder = new Decoder() - this.charset = charset - this._fields = 0 - this._state = 'key' - this._checkingBytes = true - this._bytesKey = 0 - this._bytesVal = 0 - this._key = '' - this._val = '' - this._keyTrunc = false - this._valTrunc = false - this._hitLimit = false -} - -UrlEncoded.prototype.write = function (data, cb) { - if (this._fields === this.fieldsLimit) { - if (!this.boy.hitFieldsLimit) { - this.boy.hitFieldsLimit = true - this.boy.emit('fieldsLimit') - } - return cb() - } - - let idxeq; let idxamp; let i; let p = 0; const len = data.length - - while (p < len) { - if (this._state === 'key') { - idxeq = idxamp = undefined - for (i = p; i < len; ++i) { - if 
(!this._checkingBytes) { ++p } - if (data[i] === 0x3D/* = */) { - idxeq = i - break - } else if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesKey } - } - - if (idxeq !== undefined) { - // key with assignment - if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } - this._state = 'val' - - this._hitLimit = false - this._checkingBytes = true - this._val = '' - this._bytesVal = 0 - this._valTrunc = false - this.decoder.reset() - - p = idxeq + 1 - } else if (idxamp !== undefined) { - // key with no assignment - ++this._fields - let key; const keyTrunc = this._keyTrunc - if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } - - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() - - if (key.length) { - this.boy.emit('field', decodeText(key, 'binary', this.charset), - '', - keyTrunc, - false) - } - - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... - if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._keyTrunc = true - } - } else { - if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } - p = len - } - } else { - idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesVal } - } - - if (idxamp !== undefined) { - ++this._fields - if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - this._state = 'key' - - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() - - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... 
- if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._val === '' && this.fieldSizeLimit === 0) || - (this._bytesVal = this._val.length) === this.fieldSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._valTrunc = true - } - } else { - if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } - p = len - } - } - } - cb() -} - -UrlEncoded.prototype.end = function () { - if (this.boy._done) { return } - - if (this._state === 'key' && this._key.length > 0) { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - '', - this._keyTrunc, - false) - } else if (this._state === 'val') { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - } - this.boy._done = true - this.boy.emit('finish') -} - -module.exports = UrlEncoded - - -/***/ }), - -/***/ 9730: -/***/ ((module) => { - - - -const RE_PLUS = /\+/g - -const HEX = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 -] - -function Decoder () { - this.buffer = undefined -} -Decoder.prototype.write = function (str) { - // Replace '+' with ' ' before decoding - str = str.replace(RE_PLUS, ' ') - let res = '' - let i = 0; let p = 0; const len = str.length - for (; i < len; ++i) { - if (this.buffer !== undefined) { - if (!HEX[str.charCodeAt(i)]) { - res += '%' + this.buffer - this.buffer = undefined - --i // retry character - } else { - this.buffer += str[i] - ++p - if (this.buffer.length === 2) { - res += String.fromCharCode(parseInt(this.buffer, 16)) - this.buffer = undefined - } - } - } else if (str[i] === '%') { - if (i > p) { - res += str.substring(p, i) - p = i - } - this.buffer = '' - ++p - } - } - if (p < len && this.buffer === undefined) { res += str.substring(p) } - return res -} -Decoder.prototype.reset = function () { - this.buffer = undefined -} - -module.exports = Decoder - - -/***/ }), - -/***/ 496: -/***/ ((module) => { - - - -module.exports = function basename (path) { - if (typeof path !== 'string') { return '' } - for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var - switch (path.charCodeAt(i)) { - case 0x2F: // '/' - case 0x5C: // '\' - path = path.slice(i + 1) - return (path === '..' || path === '.' ? '' : path) - } - } - return (path === '..' || path === '.' ? 
'' : path) -} - - -/***/ }), - -/***/ 9136: -/***/ ((module) => { - - - -// Node has always utf-8 -const utf8Decoder = new TextDecoder('utf-8') -const textDecoders = new Map([ - ['utf-8', utf8Decoder], - ['utf8', utf8Decoder] -]) - -function decodeText (text, textEncoding, destEncoding) { - if (text) { - if (textDecoders.has(destEncoding)) { - try { - return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) - } catch (e) { } - } else { - try { - textDecoders.set(destEncoding, new TextDecoder(destEncoding)) - return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) - } catch (e) { } - } - } - return text -} - -module.exports = decodeText - - -/***/ }), - -/***/ 9692: -/***/ ((module) => { - - - -module.exports = function getLimit (limits, name, defaultLimit) { - if ( - !limits || - limits[name] === undefined || - limits[name] === null - ) { return defaultLimit } - - if ( - typeof limits[name] !== 'number' || - isNaN(limits[name]) - ) { throw new TypeError('Limit ' + name + ' is not a valid number') } - - return limits[name] -} - - -/***/ }), - -/***/ 4426: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -const decodeText = __nccwpck_require__(9136) - -const RE_ENCODED = /%([a-fA-F0-9]{2})/g - -function encodedReplacer (match, byte) { - return String.fromCharCode(parseInt(byte, 16)) -} - -function parseParams (str) { - const res = [] - let state = 'key' - let charset = '' - let inquote = false - let escaping = false - let p = 0 - let tmp = '' - - for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var - const char = str[i] - if (char === '\\' && inquote) { - if (escaping) { escaping = false } else { - escaping = true - continue - } - } else if (char === '"') { - if (!escaping) { - if (inquote) { - inquote = false - state = 'key' - } else { inquote = true } - continue - } else { escaping = false } - } else { - if (escaping && inquote) { tmp += '\\' } - escaping = false - if ((state === 'charset' || state === 'lang') && char === "'") { - if (state === 'charset') { - state = 'lang' - charset = tmp.substring(1) - } else { state = 'value' } - tmp = '' - continue - } else if (state === 'key' && - (char === '*' || char === '=') && - res.length) { - if (char === '*') { state = 'charset' } else { state = 'value' } - res[p] = [tmp, undefined] - tmp = '' - continue - } else if (!inquote && char === ';') { - state = 'key' - if (charset) { - if (tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } - charset = '' - } else if (tmp.length) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } - tmp = '' - ++p - continue - } else if (!inquote && (char === ' ' || char === '\t')) { continue } - } - tmp += char - } - if (charset && tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } else if (tmp) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - - if (res[p] === undefined) { - if (tmp) { res[p] = tmp } - } else { res[p][1] = tmp } - - return res -} - -module.exports = parseParams - - -/***/ }), - -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = 
REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; - } - - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } - } - - return target; -} - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = 
request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. 
-// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? { - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); -} - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. 
- - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); - } - } - - const parsedOptions = typeof query === "string" ? Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlResponseError(requestOptions, headers, response.data); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4193: -/***/ ((__unused_webpack_module, exports) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? 
ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty - if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { - data: [] - }); - } - - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? 
route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { - done: true - }; - - try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; - } catch (error) { - if (error.status !== 409) throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); - }); -} - -const composePaginateRest = Object.assign(paginate, { - iterator -}); - -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET 
/orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET 
/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET 
/users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; - -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3044: -/***/ ((__unused_webpack_module, exports) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -const Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - 
deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], - getRepoPublicKey: ["GET 
/repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT 
/repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - 
checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] - }], - removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], - getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET 
/repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { - renamed: ["codeScanning", "listAlertInstances"] - }], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], - createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], - createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], - exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], - getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], - 
listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: ["GET /orgs/{org}/codespaces", {}, { - renamedParameters: { - org_id: "org" - } - }], - listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], - setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], - startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] - }, - dependabot: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] - }, - dependencyGraph: { - createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], - diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], - listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - 
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] - }], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT 
/orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], - 
deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { - renamed: ["migrations", "listReposForAuthenticatedUser"] - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeMember: ["DELETE 
/orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], - deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST 
/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], - createCard: ["POST /projects/columns/{column_id}/cards"], - createColumn: ["POST /projects/{project_id}/columns"], - createForAuthenticatedUser: ["POST /user/projects"], - createForOrg: ["POST /orgs/{org}/projects"], - createForRepo: ["POST /repos/{owner}/{repo}/projects"], - delete: ["DELETE /projects/{project_id}"], - deleteCard: ["DELETE /projects/columns/cards/{card_id}"], - deleteColumn: ["DELETE /projects/columns/{column_id}"], - get: ["GET /projects/{project_id}"], - getCard: ["GET /projects/columns/cards/{card_id}"], - getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], - listCards: ["GET /projects/columns/{column_id}/cards"], - listCollaborators: ["GET /projects/{project_id}/collaborators"], - listColumns: ["GET /projects/{project_id}/columns"], - listForOrg: ["GET /orgs/{org}/projects"], - listForRepo: ["GET /repos/{owner}/{repo}/projects"], - listForUser: ["GET /users/{username}/projects"], - moveCard: ["POST /projects/columns/cards/{card_id}/moves"], - moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], - update: ["PATCH /projects/{project_id}"], - updateCard: ["PATCH /projects/columns/cards/{card_id}"], - updateColumn: ["PATCH /projects/columns/{column_id}"] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], - deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "acceptInvitationForAuthenticatedUser"] - }], - acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages"], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "declineInvitationForAuthenticatedUser"] - }], - declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE 
/repos/{owner}/{repo}/automated-security-fixes"], - disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], - enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], - generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET 
/repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] - }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT 
/orgs/{org}/teams/{team_slug}/projects/{project_id}"], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: ["POST /user/emails", {}, { - renamed: ["users", "addEmailForAuthenticatedUser"] - }], - addEmailForAuthenticatedUser: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { - renamed: ["users", "createGpgKeyForAuthenticatedUser"] - }], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { - renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] - }], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { - renamed: ["users", "deleteEmailForAuthenticatedUser"] - }], - deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] - }], - deleteGpgKeyForAuthenticatedUser: 
["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { - renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] - }], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticatedUser"] - }], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] - }], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticatedUser"] - }], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticatedUser"] - }], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticatedUser"] - }], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticatedUser"] - }], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] - }], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] - }], - setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; - -const VERSION = "5.16.2"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - - return newMethods; -} - -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = 
octokit.request.defaults(defaults); - /* istanbul ignore next */ - - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); - } - - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; - } - - delete options[name]; - } - } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - - return requestWithDefaults(...args); - } - - return Object.assign(withDecorations, requestWithDefaults); -} - -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return { - rest: api - }; -} -restEndpointMethods.VERSION = VERSION; -function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { - rest: api - }); -} -legacyRestEndpointMethods.VERSION = VERSION; - -exports.legacyRestEndpointMethods = legacyRestEndpointMethods; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - let headers; - - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; - } - - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; - } // redact request credentials without mutating original request options - - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - - Object.defineProperty(this, "code", { - get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - Object.defineProperty(this, "headers", { - get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); - return headers || {}; - } - - }); - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); - -const VERSION = "5.6.3"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console; - - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, // `requestOptions.request.agent` type is incompatible - // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(async response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); - } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - - throw new requestError.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: undefined - }, - request: requestOptions - }); - } - - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); - } - - if (status >= 400) { - const data = await getResponseData(response); - const error = new requestError.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; - } - - return getResponseData(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) throw error; - throw new requestError.RequestError(error.message, 500, { - request: requestOptions - }); - }); -} - -async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); -} - -function toErrorMessage(data) { - if (typeof data === "string") return data; // istanbul ignore else - just in case - - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; - } - - return data.message; - } // istanbul ignore next - just in case - - - return `Unknown error: ${JSON.stringify(data)}`; -} - -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; - - Object.assign(request, { - 
endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; - - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} - -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); - -exports.request = request; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = -{ - parallel : __nccwpck_require__(8210), - serial : __nccwpck_require__(445), - serialOrdered : __nccwpck_require__(3578) -}; - - -/***/ }), - -/***/ 1700: -/***/ ((module) => { - -// API -module.exports = abort; - -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); - - // reset leftover jobs - state.jobs = {}; -} - -/** - * Cleans up leftover job by invoking abort function for the provided job id - * - * @this state - * @param {string|number} key - job id to abort - */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); - } -} - - -/***/ }), - -/***/ 2794: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var defer = __nccwpck_require__(5295); - -// API -module.exports = async; - -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented callback - */ -function async(callback) -{ - var isAsync = false; - - // check if async happened - defer(function() { isAsync = true; }); - - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); - } - }; -} - - -/***/ }), - -/***/ 5295: -/***/ ((module) => { - -module.exports = defer; - -/** - * Runs provided function on next iteration of the event loop - * - * @param {function} fn - function to run - */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); - - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); - } -} - - -/***/ }), - -/***/ 9023: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var async = __nccwpck_require__(2794) - , abort = __nccwpck_require__(1700) - ; - -// API -module.exports = iterate; - -/** - * Iterates over each job object - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed - */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index; - - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; - } - - // clean up jobs - delete state.jobs[key]; - - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); - } - else - { - state.results[key] = output; - } - - // return salvaged results - callback(error, state.results); - }); -} - -/** - * Runs iterator over provided job element - * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else - */ -function runJob(iterator, key, item, callback) -{ - var aborter; - - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); - } - - return aborter; -} - - -/***/ }), - -/***/ 2474: -/***/ ((module) => { - -// API -module.exports = state; - -/** - * Creates initial state object - * for iteration over list - * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object - */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; - - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); - } - - return initState; -} - - -/***/ }), - -/***/ 7942: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var abort = __nccwpck_require__(1700) - , async = __nccwpck_require__(2794) - ; - -// API -module.exports = terminator; - -/** - * Terminates jobs in the attached state context - * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination - */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; - } - - // fast forward iteration index - this.index = this.size; - - // abort jobs - abort(this); - - // send back results we have so far - async(callback)(null, this.results); -} - - -/***/ }), - -/***/ 8210: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(2474) - , terminator = __nccwpck_require__(7942) - ; - -// Public API -module.exports = parallel; - -/** - * Runs iterator over provided array elements in parallel - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function parallel(list, iterator, callback) -{ - var state = initState(list); - - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } - - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; - } - }); - - state.index++; - } - - return terminator.bind(state, callback); -} - - -/***/ }), - -/***/ 445: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var serialOrdered = __nccwpck_require__(3578); - -// Public API -module.exports = serial; - -/** - * Runs iterator over provided array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); -} - - -/***/ }), - -/***/ 3578: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(2474) - , terminator = __nccwpck_require__(7942) - ; - -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; - -/** - * Runs iterator over provided sorted array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); - - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; - } - - state.index++; - - // are we there yet? 
- if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; - } - - // done here - callback(null, state.results); - }); - - return terminator.bind(state, callback); -} - -/* - * -- Sort methods - */ - -/** - * sort helper to sort array elements in ascending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 1 : 0; -} - -/** - * sort helper to sort array elements in descending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function descending(a, b) -{ - return -1 * ascending(a, b); -} - - -/***/ }), - -/***/ 6545: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(2618); - -/***/ }), - -/***/ 8104: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); -var settle = __nccwpck_require__(3211); -var buildFullPath = __nccwpck_require__(1934); -var buildURL = __nccwpck_require__(646); -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var httpFollow = (__nccwpck_require__(7707).http); -var httpsFollow = (__nccwpck_require__(7707).https); -var url = __nccwpck_require__(7310); -var zlib = __nccwpck_require__(9796); -var VERSION = (__nccwpck_require__(4322).version); -var transitionalDefaults = __nccwpck_require__(936); -var AxiosError = __nccwpck_require__(2093); -var CanceledError = __nccwpck_require__(4098); - -var isHttps = /https:?/; - -var supportedProtocols = [ 'http:', 'https:', 'file:' ]; - -/** - * - * @param {http.ClientRequestArgs} options - * @param {AxiosProxyConfig} proxy - * @param {string} location - */ -function setProxy(options, proxy, location) { - options.hostname = proxy.host; - options.host = proxy.host; - options.port = proxy.port; - options.path = location; - - // Basic proxy authorization - if (proxy.auth) { - var base64 = Buffer.from(proxy.auth.username + ':' + proxy.auth.password, 'utf8').toString('base64'); - options.headers['Proxy-Authorization'] = 'Basic ' + base64; - } - - // If a proxy is used, any redirects must also pass through the proxy - options.beforeRedirect = function beforeRedirect(redirection) { - redirection.headers.host = redirection.host; - setProxy(redirection, proxy, redirection.href); - }; -} - -/*eslint consistent-return:0*/ -module.exports = function httpAdapter(config) { - return new Promise(function dispatchHttpRequest(resolvePromise, rejectPromise) { - var onCanceled; - function done() { - if (config.cancelToken) { - config.cancelToken.unsubscribe(onCanceled); - } - - if (config.signal) { - config.signal.removeEventListener('abort', onCanceled); - } - } - var resolve = function resolve(value) { - done(); - resolvePromise(value); - }; - var rejected = false; - var reject = function reject(value) { - done(); - rejected = true; - rejectPromise(value); - }; - var data = config.data; - var headers = config.headers; - var headerNames = {}; - - Object.keys(headers).forEach(function storeLowerName(name) { - headerNames[name.toLowerCase()] = name; - }); - - // Set User-Agent (required by some servers) - // See https://github.com/axios/axios/issues/69 - if ('user-agent' in headerNames) { - // User-Agent is specified; handle case where no UA header is desired - if (!headers[headerNames['user-agent']]) { - delete 
headers[headerNames['user-agent']]; - } - // Otherwise, use specified value - } else { - // Only set header if it hasn't been set in config - headers['User-Agent'] = 'axios/' + VERSION; - } - - // support for https://www.npmjs.com/package/form-data api - if (utils.isFormData(data) && utils.isFunction(data.getHeaders)) { - Object.assign(headers, data.getHeaders()); - } else if (data && !utils.isStream(data)) { - if (Buffer.isBuffer(data)) { - // Nothing to do... - } else if (utils.isArrayBuffer(data)) { - data = Buffer.from(new Uint8Array(data)); - } else if (utils.isString(data)) { - data = Buffer.from(data, 'utf-8'); - } else { - return reject(new AxiosError( - 'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream', - AxiosError.ERR_BAD_REQUEST, - config - )); - } - - if (config.maxBodyLength > -1 && data.length > config.maxBodyLength) { - return reject(new AxiosError( - 'Request body larger than maxBodyLength limit', - AxiosError.ERR_BAD_REQUEST, - config - )); - } - - // Add Content-Length header if data exists - if (!headerNames['content-length']) { - headers['Content-Length'] = data.length; - } - } - - // HTTP basic authentication - var auth = undefined; - if (config.auth) { - var username = config.auth.username || ''; - var password = config.auth.password || ''; - auth = username + ':' + password; - } - - // Parse url - var fullPath = buildFullPath(config.baseURL, config.url); - var parsed = url.parse(fullPath); - var protocol = parsed.protocol || supportedProtocols[0]; - - if (supportedProtocols.indexOf(protocol) === -1) { - return reject(new AxiosError( - 'Unsupported protocol ' + protocol, - AxiosError.ERR_BAD_REQUEST, - config - )); - } - - if (!auth && parsed.auth) { - var urlAuth = parsed.auth.split(':'); - var urlUsername = urlAuth[0] || ''; - var urlPassword = urlAuth[1] || ''; - auth = urlUsername + ':' + urlPassword; - } - - if (auth && headerNames.authorization) { - delete headers[headerNames.authorization]; - } - - var isHttpsRequest = isHttps.test(protocol); - var agent = isHttpsRequest ? config.httpsAgent : config.httpAgent; - - try { - buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''); - } catch (err) { - var customErr = new Error(err.message); - customErr.config = config; - customErr.url = config.url; - customErr.exists = true; - reject(customErr); - } - - var options = { - path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''), - method: config.method.toUpperCase(), - headers: headers, - agent: agent, - agents: { http: config.httpAgent, https: config.httpsAgent }, - auth: auth - }; - - if (config.socketPath) { - options.socketPath = config.socketPath; - } else { - options.hostname = parsed.hostname; - options.port = parsed.port; - } - - var proxy = config.proxy; - if (!proxy && proxy !== false) { - var proxyEnv = protocol.slice(0, -1) + '_proxy'; - var proxyUrl = process.env[proxyEnv] || process.env[proxyEnv.toUpperCase()]; - if (proxyUrl) { - var parsedProxyUrl = url.parse(proxyUrl); - var noProxyEnv = process.env.no_proxy || process.env.NO_PROXY; - var shouldProxy = true; - - if (noProxyEnv) { - var noProxy = noProxyEnv.split(',').map(function trim(s) { - return s.trim(); - }); - - shouldProxy = !noProxy.some(function proxyMatch(proxyElement) { - if (!proxyElement) { - return false; - } - if (proxyElement === '*') { - return true; - } - if (proxyElement[0] === '.' 
&& - parsed.hostname.substr(parsed.hostname.length - proxyElement.length) === proxyElement) { - return true; - } - - return parsed.hostname === proxyElement; - }); - } - - if (shouldProxy) { - proxy = { - host: parsedProxyUrl.hostname, - port: parsedProxyUrl.port, - protocol: parsedProxyUrl.protocol - }; - - if (parsedProxyUrl.auth) { - var proxyUrlAuth = parsedProxyUrl.auth.split(':'); - proxy.auth = { - username: proxyUrlAuth[0], - password: proxyUrlAuth[1] - }; - } - } - } - } - - if (proxy) { - options.headers.host = parsed.hostname + (parsed.port ? ':' + parsed.port : ''); - setProxy(options, proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path); - } - - var transport; - var isHttpsProxy = isHttpsRequest && (proxy ? isHttps.test(proxy.protocol) : true); - if (config.transport) { - transport = config.transport; - } else if (config.maxRedirects === 0) { - transport = isHttpsProxy ? https : http; - } else { - if (config.maxRedirects) { - options.maxRedirects = config.maxRedirects; - } - if (config.beforeRedirect) { - options.beforeRedirect = config.beforeRedirect; - } - transport = isHttpsProxy ? httpsFollow : httpFollow; - } - - if (config.maxBodyLength > -1) { - options.maxBodyLength = config.maxBodyLength; - } - - if (config.insecureHTTPParser) { - options.insecureHTTPParser = config.insecureHTTPParser; - } - - // Create the request - var req = transport.request(options, function handleResponse(res) { - if (req.aborted) return; - - // uncompress the response body transparently if required - var stream = res; - - // return the last request in case of redirects - var lastRequest = res.req || req; - - - // if no content, is HEAD request or decompress disabled we should not decompress - if (res.statusCode !== 204 && lastRequest.method !== 'HEAD' && config.decompress !== false) { - switch (res.headers['content-encoding']) { - /*eslint default-case:0*/ - case 'gzip': - case 'compress': - case 'deflate': - // add the unzipper to the body stream processing pipeline - stream = stream.pipe(zlib.createUnzip()); - - // remove the content-encoding in order to not confuse downstream operations - delete res.headers['content-encoding']; - break; - } - } - - var response = { - status: res.statusCode, - statusText: res.statusMessage, - headers: res.headers, - config: config, - request: lastRequest - }; - - if (config.responseType === 'stream') { - response.data = stream; - settle(resolve, reject, response); - } else { - var responseBuffer = []; - var totalResponseBytes = 0; - stream.on('data', function handleStreamData(chunk) { - responseBuffer.push(chunk); - totalResponseBytes += chunk.length; - - // make sure the content length is not over the maxContentLength if specified - if (config.maxContentLength > -1 && totalResponseBytes > config.maxContentLength) { - // stream.destoy() emit aborted event before calling reject() on Node.js v16 - rejected = true; - stream.destroy(); - reject(new AxiosError('maxContentLength size of ' + config.maxContentLength + ' exceeded', - AxiosError.ERR_BAD_RESPONSE, config, lastRequest)); - } - }); - - stream.on('aborted', function handlerStreamAborted() { - if (rejected) { - return; - } - stream.destroy(); - reject(new AxiosError( - 'maxContentLength size of ' + config.maxContentLength + ' exceeded', - AxiosError.ERR_BAD_RESPONSE, - config, - lastRequest - )); - }); - - stream.on('error', function handleStreamError(err) { - if (req.aborted) return; - reject(AxiosError.from(err, null, config, lastRequest)); - }); - - 
stream.on('end', function handleStreamEnd() { - try { - var responseData = responseBuffer.length === 1 ? responseBuffer[0] : Buffer.concat(responseBuffer); - if (config.responseType !== 'arraybuffer') { - responseData = responseData.toString(config.responseEncoding); - if (!config.responseEncoding || config.responseEncoding === 'utf8') { - responseData = utils.stripBOM(responseData); - } - } - response.data = responseData; - } catch (err) { - reject(AxiosError.from(err, null, config, response.request, response)); - } - settle(resolve, reject, response); - }); - } - }); - - // Handle errors - req.on('error', function handleRequestError(err) { - // @todo remove - // if (req.aborted && err.code !== AxiosError.ERR_FR_TOO_MANY_REDIRECTS) return; - reject(AxiosError.from(err, null, config, req)); - }); - - // set tcp keep alive to prevent drop connection by peer - req.on('socket', function handleRequestSocket(socket) { - // default interval of sending ack packet is 1 minute - socket.setKeepAlive(true, 1000 * 60); - }); - - // Handle request timeout - if (config.timeout) { - // This is forcing a int timeout to avoid problems if the `req` interface doesn't handle other types. - var timeout = parseInt(config.timeout, 10); - - if (isNaN(timeout)) { - reject(new AxiosError( - 'error trying to parse `config.timeout` to int', - AxiosError.ERR_BAD_OPTION_VALUE, - config, - req - )); - - return; - } - - // Sometime, the response will be very slow, and does not respond, the connect event will be block by event loop system. - // And timer callback will be fired, and abort() will be invoked before connection, then get "socket hang up" and code ECONNRESET. - // At this time, if we have a large number of request, nodejs will hang up some socket on background. and the number will up and up. - // And then these socket which be hang up will devoring CPU little by little. - // ClientRequest.setTimeout will be fired on the specify milliseconds, and can make sure that abort() will be fired after connect. - req.setTimeout(timeout, function handleRequestTimeout() { - req.abort(); - var transitional = config.transitional || transitionalDefaults; - reject(new AxiosError( - 'timeout of ' + timeout + 'ms exceeded', - transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED, - config, - req - )); - }); - } - - if (config.cancelToken || config.signal) { - // Handle cancellation - // eslint-disable-next-line func-names - onCanceled = function(cancel) { - if (req.aborted) return; - - req.abort(); - reject(!cancel || (cancel && cancel.type) ? new CanceledError() : cancel); - }; - - config.cancelToken && config.cancelToken.subscribe(onCanceled); - if (config.signal) { - config.signal.aborted ? 
onCanceled() : config.signal.addEventListener('abort', onCanceled); - } - } - - - // Send the request - if (utils.isStream(data)) { - data.on('error', function handleStreamError(err) { - reject(AxiosError.from(err, config, null, req)); - }).pipe(req); - } else { - req.end(data); - } - }); -}; - - -/***/ }), - -/***/ 3454: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); -var settle = __nccwpck_require__(3211); -var cookies = __nccwpck_require__(1545); -var buildURL = __nccwpck_require__(646); -var buildFullPath = __nccwpck_require__(1934); -var parseHeaders = __nccwpck_require__(6455); -var isURLSameOrigin = __nccwpck_require__(3608); -var transitionalDefaults = __nccwpck_require__(936); -var AxiosError = __nccwpck_require__(2093); -var CanceledError = __nccwpck_require__(4098); -var parseProtocol = __nccwpck_require__(6107); - -module.exports = function xhrAdapter(config) { - return new Promise(function dispatchXhrRequest(resolve, reject) { - var requestData = config.data; - var requestHeaders = config.headers; - var responseType = config.responseType; - var onCanceled; - function done() { - if (config.cancelToken) { - config.cancelToken.unsubscribe(onCanceled); - } - - if (config.signal) { - config.signal.removeEventListener('abort', onCanceled); - } - } - - if (utils.isFormData(requestData) && utils.isStandardBrowserEnv()) { - delete requestHeaders['Content-Type']; // Let the browser set it - } - - var request = new XMLHttpRequest(); - - // HTTP basic authentication - if (config.auth) { - var username = config.auth.username || ''; - var password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : ''; - requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password); - } - - var fullPath = buildFullPath(config.baseURL, config.url); - - request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true); - - // Set the request timeout in MS - request.timeout = config.timeout; - - function onloadend() { - if (!request) { - return; - } - // Prepare the response - var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null; - var responseData = !responseType || responseType === 'text' || responseType === 'json' ? 
- request.responseText : request.response; - var response = { - data: responseData, - status: request.status, - statusText: request.statusText, - headers: responseHeaders, - config: config, - request: request - }; - - settle(function _resolve(value) { - resolve(value); - done(); - }, function _reject(err) { - reject(err); - done(); - }, response); - - // Clean up request - request = null; - } - - if ('onloadend' in request) { - // Use onloadend if available - request.onloadend = onloadend; - } else { - // Listen for ready state to emulate onloadend - request.onreadystatechange = function handleLoad() { - if (!request || request.readyState !== 4) { - return; - } - - // The request errored out and we didn't get a response, this will be - // handled by onerror instead - // With one exception: request that using file: protocol, most browsers - // will return status as 0 even though it's a successful request - if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) { - return; - } - // readystate handler is calling before onerror or ontimeout handlers, - // so we should call onloadend on the next 'tick' - setTimeout(onloadend); - }; - } - - // Handle browser request cancellation (as opposed to a manual cancellation) - request.onabort = function handleAbort() { - if (!request) { - return; - } - - reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, config, request)); - - // Clean up request - request = null; - }; - - // Handle low level network errors - request.onerror = function handleError() { - // Real errors are hidden from us by the browser - // onerror should only fire if it's a network error - reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request, request)); - - // Clean up request - request = null; - }; - - // Handle timeout - request.ontimeout = function handleTimeout() { - var timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded'; - var transitional = config.transitional || transitionalDefaults; - if (config.timeoutErrorMessage) { - timeoutErrorMessage = config.timeoutErrorMessage; - } - reject(new AxiosError( - timeoutErrorMessage, - transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED, - config, - request)); - - // Clean up request - request = null; - }; - - // Add xsrf header - // This is only done if running in a standard browser environment. - // Specifically not if we're in a web worker, or react-native. - if (utils.isStandardBrowserEnv()) { - // Add xsrf header - var xsrfValue = (config.withCredentials || isURLSameOrigin(fullPath)) && config.xsrfCookieName ? 
- cookies.read(config.xsrfCookieName) : - undefined; - - if (xsrfValue) { - requestHeaders[config.xsrfHeaderName] = xsrfValue; - } - } - - // Add headers to the request - if ('setRequestHeader' in request) { - utils.forEach(requestHeaders, function setRequestHeader(val, key) { - if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') { - // Remove Content-Type if data is undefined - delete requestHeaders[key]; - } else { - // Otherwise add header to the request - request.setRequestHeader(key, val); - } - }); - } - - // Add withCredentials to request if needed - if (!utils.isUndefined(config.withCredentials)) { - request.withCredentials = !!config.withCredentials; - } - - // Add responseType to request if needed - if (responseType && responseType !== 'json') { - request.responseType = config.responseType; - } - - // Handle progress if needed - if (typeof config.onDownloadProgress === 'function') { - request.addEventListener('progress', config.onDownloadProgress); - } - - // Not all browsers support upload events - if (typeof config.onUploadProgress === 'function' && request.upload) { - request.upload.addEventListener('progress', config.onUploadProgress); - } - - if (config.cancelToken || config.signal) { - // Handle cancellation - // eslint-disable-next-line func-names - onCanceled = function(cancel) { - if (!request) { - return; - } - reject(!cancel || (cancel && cancel.type) ? new CanceledError() : cancel); - request.abort(); - request = null; - }; - - config.cancelToken && config.cancelToken.subscribe(onCanceled); - if (config.signal) { - config.signal.aborted ? onCanceled() : config.signal.addEventListener('abort', onCanceled); - } - } - - if (!requestData) { - requestData = null; - } - - var protocol = parseProtocol(fullPath); - - if (protocol && [ 'http', 'https', 'file' ].indexOf(protocol) === -1) { - reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config)); - return; - } - - - // Send the request - request.send(requestData); - }); -}; - - -/***/ }), - -/***/ 2618: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); -var bind = __nccwpck_require__(7065); -var Axios = __nccwpck_require__(8178); -var mergeConfig = __nccwpck_require__(4831); -var defaults = __nccwpck_require__(1626); - -/** - * Create an instance of Axios - * - * @param {Object} defaultConfig The default config for the instance - * @return {Axios} A new instance of Axios - */ -function createInstance(defaultConfig) { - var context = new Axios(defaultConfig); - var instance = bind(Axios.prototype.request, context); - - // Copy axios.prototype to instance - utils.extend(instance, Axios.prototype, context); - - // Copy context to instance - utils.extend(instance, context); - - // Factory for creating new instances - instance.create = function create(instanceConfig) { - return createInstance(mergeConfig(defaultConfig, instanceConfig)); - }; - - return instance; -} - -// Create the default instance to be exported -var axios = createInstance(defaults); - -// Expose Axios class to allow class inheritance -axios.Axios = Axios; - -// Expose Cancel & CancelToken -axios.CanceledError = __nccwpck_require__(4098); -axios.CancelToken = __nccwpck_require__(1587); -axios.isCancel = __nccwpck_require__(4057); -axios.VERSION = (__nccwpck_require__(4322).version); -axios.toFormData = __nccwpck_require__(470); - -// Expose AxiosError class -axios.AxiosError = __nccwpck_require__(2093); - -// alias for 
CanceledError for backward compatibility -axios.Cancel = axios.CanceledError; - -// Expose all/spread -axios.all = function all(promises) { - return Promise.all(promises); -}; -axios.spread = __nccwpck_require__(4850); - -// Expose isAxiosError -axios.isAxiosError = __nccwpck_require__(650); - -module.exports = axios; - -// Allow use of default import syntax in TypeScript -module.exports["default"] = axios; - - -/***/ }), - -/***/ 1587: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var CanceledError = __nccwpck_require__(4098); - -/** - * A `CancelToken` is an object that can be used to request cancellation of an operation. - * - * @class - * @param {Function} executor The executor function. - */ -function CancelToken(executor) { - if (typeof executor !== 'function') { - throw new TypeError('executor must be a function.'); - } - - var resolvePromise; - - this.promise = new Promise(function promiseExecutor(resolve) { - resolvePromise = resolve; - }); - - var token = this; - - // eslint-disable-next-line func-names - this.promise.then(function(cancel) { - if (!token._listeners) return; - - var i; - var l = token._listeners.length; - - for (i = 0; i < l; i++) { - token._listeners[i](cancel); - } - token._listeners = null; - }); - - // eslint-disable-next-line func-names - this.promise.then = function(onfulfilled) { - var _resolve; - // eslint-disable-next-line func-names - var promise = new Promise(function(resolve) { - token.subscribe(resolve); - _resolve = resolve; - }).then(onfulfilled); - - promise.cancel = function reject() { - token.unsubscribe(_resolve); - }; - - return promise; - }; - - executor(function cancel(message) { - if (token.reason) { - // Cancellation has already been requested - return; - } - - token.reason = new CanceledError(message); - resolvePromise(token.reason); - }); -} - -/** - * Throws a `CanceledError` if cancellation has been requested. - */ -CancelToken.prototype.throwIfRequested = function throwIfRequested() { - if (this.reason) { - throw this.reason; - } -}; - -/** - * Subscribe to the cancel signal - */ - -CancelToken.prototype.subscribe = function subscribe(listener) { - if (this.reason) { - listener(this.reason); - return; - } - - if (this._listeners) { - this._listeners.push(listener); - } else { - this._listeners = [listener]; - } -}; - -/** - * Unsubscribe from the cancel signal - */ - -CancelToken.prototype.unsubscribe = function unsubscribe(listener) { - if (!this._listeners) { - return; - } - var index = this._listeners.indexOf(listener); - if (index !== -1) { - this._listeners.splice(index, 1); - } -}; - -/** - * Returns an object that contains a new `CancelToken` and a function that, when called, - * cancels the `CancelToken`. - */ -CancelToken.source = function source() { - var cancel; - var token = new CancelToken(function executor(c) { - cancel = c; - }); - return { - token: token, - cancel: cancel - }; -}; - -module.exports = CancelToken; - - -/***/ }), - -/***/ 4098: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var AxiosError = __nccwpck_require__(2093); -var utils = __nccwpck_require__(328); - -/** - * A `CanceledError` is an object that is thrown when an operation is canceled. - * - * @class - * @param {string=} message The message. - */ -function CanceledError(message) { - // eslint-disable-next-line no-eq-null,eqeqeq - AxiosError.call(this, message == null ? 
'canceled' : message, AxiosError.ERR_CANCELED); - this.name = 'CanceledError'; -} - -utils.inherits(CanceledError, AxiosError, { - __CANCEL__: true -}); - -module.exports = CanceledError; - - -/***/ }), - -/***/ 4057: -/***/ ((module) => { - - - -module.exports = function isCancel(value) { - return !!(value && value.__CANCEL__); -}; - - -/***/ }), - -/***/ 8178: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); -var buildURL = __nccwpck_require__(646); -var InterceptorManager = __nccwpck_require__(3214); -var dispatchRequest = __nccwpck_require__(5062); -var mergeConfig = __nccwpck_require__(4831); -var buildFullPath = __nccwpck_require__(1934); -var validator = __nccwpck_require__(1632); - -var validators = validator.validators; -/** - * Create a new instance of Axios - * - * @param {Object} instanceConfig The default config for the instance - */ -function Axios(instanceConfig) { - this.defaults = instanceConfig; - this.interceptors = { - request: new InterceptorManager(), - response: new InterceptorManager() - }; -} - -/** - * Dispatch a request - * - * @param {Object} config The config specific for this request (merged with this.defaults) - */ -Axios.prototype.request = function request(configOrUrl, config) { - /*eslint no-param-reassign:0*/ - // Allow for axios('example/url'[, config]) a la fetch API - if (typeof configOrUrl === 'string') { - config = config || {}; - config.url = configOrUrl; - } else { - config = configOrUrl || {}; - } - - config = mergeConfig(this.defaults, config); - - // Set config.method - if (config.method) { - config.method = config.method.toLowerCase(); - } else if (this.defaults.method) { - config.method = this.defaults.method.toLowerCase(); - } else { - config.method = 'get'; - } - - var transitional = config.transitional; - - if (transitional !== undefined) { - validator.assertOptions(transitional, { - silentJSONParsing: validators.transitional(validators.boolean), - forcedJSONParsing: validators.transitional(validators.boolean), - clarifyTimeoutError: validators.transitional(validators.boolean) - }, false); - } - - // filter out skipped interceptors - var requestInterceptorChain = []; - var synchronousRequestInterceptors = true; - this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) { - if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) { - return; - } - - synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous; - - requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected); - }); - - var responseInterceptorChain = []; - this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) { - responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected); - }); - - var promise; - - if (!synchronousRequestInterceptors) { - var chain = [dispatchRequest, undefined]; - - Array.prototype.unshift.apply(chain, requestInterceptorChain); - chain = chain.concat(responseInterceptorChain); - - promise = Promise.resolve(config); - while (chain.length) { - promise = promise.then(chain.shift(), chain.shift()); - } - - return promise; - } - - - var newConfig = config; - while (requestInterceptorChain.length) { - var onFulfilled = requestInterceptorChain.shift(); - var onRejected = requestInterceptorChain.shift(); - try { - newConfig = onFulfilled(newConfig); - } catch (error) { - onRejected(error); - break; - } - } - - try { - promise = dispatchRequest(newConfig); - } 
catch (error) { - return Promise.reject(error); - } - - while (responseInterceptorChain.length) { - promise = promise.then(responseInterceptorChain.shift(), responseInterceptorChain.shift()); - } - - return promise; -}; - -Axios.prototype.getUri = function getUri(config) { - config = mergeConfig(this.defaults, config); - var fullPath = buildFullPath(config.baseURL, config.url); - return buildURL(fullPath, config.params, config.paramsSerializer); -}; - -// Provide aliases for supported request methods -utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) { - /*eslint func-names:0*/ - Axios.prototype[method] = function(url, config) { - return this.request(mergeConfig(config || {}, { - method: method, - url: url, - data: (config || {}).data - })); - }; -}); - -utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { - /*eslint func-names:0*/ - - function generateHTTPMethod(isForm) { - return function httpMethod(url, data, config) { - return this.request(mergeConfig(config || {}, { - method: method, - headers: isForm ? { - 'Content-Type': 'multipart/form-data' - } : {}, - url: url, - data: data - })); - }; - } - - Axios.prototype[method] = generateHTTPMethod(); - - Axios.prototype[method + 'Form'] = generateHTTPMethod(true); -}); - -module.exports = Axios; - - -/***/ }), - -/***/ 2093: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -/** - * Create an Error with the specified message, config, error code, request and response. - * - * @param {string} message The error message. - * @param {string} [code] The error code (for example, 'ECONNABORTED'). - * @param {Object} [config] The config. - * @param {Object} [request] The request. - * @param {Object} [response] The response. - * @returns {Error} The created error. - */ -function AxiosError(message, code, config, request, response) { - Error.call(this); - this.message = message; - this.name = 'AxiosError'; - code && (this.code = code); - config && (this.config = config); - request && (this.request = request); - response && (this.response = response); -} - -utils.inherits(AxiosError, Error, { - toJSON: function toJSON() { - return { - // Standard - message: this.message, - name: this.name, - // Microsoft - description: this.description, - number: this.number, - // Mozilla - fileName: this.fileName, - lineNumber: this.lineNumber, - columnNumber: this.columnNumber, - stack: this.stack, - // Axios - config: this.config, - code: this.code, - status: this.response && this.response.status ? 
this.response.status : null - }; - } -}); - -var prototype = AxiosError.prototype; -var descriptors = {}; - -[ - 'ERR_BAD_OPTION_VALUE', - 'ERR_BAD_OPTION', - 'ECONNABORTED', - 'ETIMEDOUT', - 'ERR_NETWORK', - 'ERR_FR_TOO_MANY_REDIRECTS', - 'ERR_DEPRECATED', - 'ERR_BAD_RESPONSE', - 'ERR_BAD_REQUEST', - 'ERR_CANCELED' -// eslint-disable-next-line func-names -].forEach(function(code) { - descriptors[code] = {value: code}; -}); - -Object.defineProperties(AxiosError, descriptors); -Object.defineProperty(prototype, 'isAxiosError', {value: true}); - -// eslint-disable-next-line func-names -AxiosError.from = function(error, code, config, request, response, customProps) { - var axiosError = Object.create(prototype); - - utils.toFlatObject(error, axiosError, function filter(obj) { - return obj !== Error.prototype; - }); - - AxiosError.call(axiosError, error.message, code, config, request, response); - - axiosError.name = error.name; - - customProps && Object.assign(axiosError, customProps); - - return axiosError; -}; - -module.exports = AxiosError; - - -/***/ }), - -/***/ 3214: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -function InterceptorManager() { - this.handlers = []; -} - -/** - * Add a new interceptor to the stack - * - * @param {Function} fulfilled The function to handle `then` for a `Promise` - * @param {Function} rejected The function to handle `reject` for a `Promise` - * - * @return {Number} An ID used to remove interceptor later - */ -InterceptorManager.prototype.use = function use(fulfilled, rejected, options) { - this.handlers.push({ - fulfilled: fulfilled, - rejected: rejected, - synchronous: options ? options.synchronous : false, - runWhen: options ? options.runWhen : null - }); - return this.handlers.length - 1; -}; - -/** - * Remove an interceptor from the stack - * - * @param {Number} id The ID that was returned by `use` - */ -InterceptorManager.prototype.eject = function eject(id) { - if (this.handlers[id]) { - this.handlers[id] = null; - } -}; - -/** - * Iterate over all the registered interceptors - * - * This method is particularly useful for skipping over any - * interceptors that may have become `null` calling `eject`. - * - * @param {Function} fn The function to call for each interceptor - */ -InterceptorManager.prototype.forEach = function forEach(fn) { - utils.forEach(this.handlers, function forEachHandler(h) { - if (h !== null) { - fn(h); - } - }); -}; - -module.exports = InterceptorManager; - - -/***/ }), - -/***/ 1934: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var isAbsoluteURL = __nccwpck_require__(1301); -var combineURLs = __nccwpck_require__(7189); - -/** - * Creates a new URL by combining the baseURL with the requestedURL, - * only when the requestedURL is not already an absolute URL. - * If the requestURL is absolute, this function returns the requestedURL untouched. 
- * - * @param {string} baseURL The base URL - * @param {string} requestedURL Absolute or relative URL to combine - * @returns {string} The combined full path - */ -module.exports = function buildFullPath(baseURL, requestedURL) { - if (baseURL && !isAbsoluteURL(requestedURL)) { - return combineURLs(baseURL, requestedURL); - } - return requestedURL; -}; - - -/***/ }), - -/***/ 5062: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); -var transformData = __nccwpck_require__(9812); -var isCancel = __nccwpck_require__(4057); -var defaults = __nccwpck_require__(1626); -var CanceledError = __nccwpck_require__(4098); - -/** - * Throws a `CanceledError` if cancellation has been requested. - */ -function throwIfCancellationRequested(config) { - if (config.cancelToken) { - config.cancelToken.throwIfRequested(); - } - - if (config.signal && config.signal.aborted) { - throw new CanceledError(); - } +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; } - -/** - * Dispatch a request to the server using the configured adapter. - * - * @param {object} config The config that is to be used for the request - * @returns {Promise} The Promise to be fulfilled - */ -module.exports = function dispatchRequest(config) { - throwIfCancellationRequested(config); - - // Ensure headers exist - config.headers = config.headers || {}; - - // Transform request data - config.data = transformData.call( - config, - config.data, - config.headers, - config.transformRequest - ); - - // Flatten headers - config.headers = utils.merge( - config.headers.common || {}, - config.headers[config.method] || {}, - config.headers - ); - - utils.forEach( - ['delete', 'get', 'head', 'post', 'put', 'patch', 'common'], - function cleanHeaderConfig(method) { - delete config.headers[method]; - } - ); - - var adapter = config.adapter || defaults.adapter; - - return adapter(config).then(function onAdapterResolution(response) { - throwIfCancellationRequested(config); - - // Transform response data - response.data = transformData.call( - config, - response.data, - response.headers, - config.transformResponse - ); - - return response; - }, function onAdapterRejection(reason) { - if (!isCancel(reason)) { - throwIfCancellationRequested(config); - - // Transform response data - if (reason && reason.response) { - reason.response.data = transformData.call( - config, - reason.response.data, - reason.response.headers, - config.transformResponse - ); - } - } - - return Promise.reject(reason); - }); -}; +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), -/***/ 4831: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -/** - * Config-specific merge-function which creates a new config-object - * by merging two configuration objects together. 
- * - * @param {Object} config1 - * @param {Object} config2 - * @returns {Object} New object resulting from merging config2 to config1 - */ -module.exports = function mergeConfig(config1, config2) { - // eslint-disable-next-line no-param-reassign - config2 = config2 || {}; - var config = {}; - - function getMergedValue(target, source) { - if (utils.isPlainObject(target) && utils.isPlainObject(source)) { - return utils.merge(target, source); - } else if (utils.isPlainObject(source)) { - return utils.merge({}, source); - } else if (utils.isArray(source)) { - return source.slice(); - } - return source; - } - - // eslint-disable-next-line consistent-return - function mergeDeepProperties(prop) { - if (!utils.isUndefined(config2[prop])) { - return getMergedValue(config1[prop], config2[prop]); - } else if (!utils.isUndefined(config1[prop])) { - return getMergedValue(undefined, config1[prop]); - } - } +/***/ 2317: +/***/ ((module) => { - // eslint-disable-next-line consistent-return - function valueFromConfig2(prop) { - if (!utils.isUndefined(config2[prop])) { - return getMergedValue(undefined, config2[prop]); - } - } - // eslint-disable-next-line consistent-return - function defaultToConfig2(prop) { - if (!utils.isUndefined(config2[prop])) { - return getMergedValue(undefined, config2[prop]); - } else if (!utils.isUndefined(config1[prop])) { - return getMergedValue(undefined, config1[prop]); - } +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } - - // eslint-disable-next-line consistent-return - function mergeDirectKeys(prop) { - if (prop in config2) { - return getMergedValue(config1[prop], config2[prop]); - } else if (prop in config1) { - return getMergedValue(undefined, config1[prop]); - } - } - - var mergeMap = { - 'url': valueFromConfig2, - 'method': valueFromConfig2, - 'data': valueFromConfig2, - 'baseURL': defaultToConfig2, - 'transformRequest': defaultToConfig2, - 'transformResponse': defaultToConfig2, - 'paramsSerializer': defaultToConfig2, - 'timeout': defaultToConfig2, - 'timeoutMessage': defaultToConfig2, - 'withCredentials': defaultToConfig2, - 'adapter': defaultToConfig2, - 'responseType': defaultToConfig2, - 'xsrfCookieName': defaultToConfig2, - 'xsrfHeaderName': defaultToConfig2, - 'onUploadProgress': defaultToConfig2, - 'onDownloadProgress': defaultToConfig2, - 'decompress': defaultToConfig2, - 'maxContentLength': defaultToConfig2, - 'maxBodyLength': defaultToConfig2, - 'beforeRedirect': defaultToConfig2, - 'transport': defaultToConfig2, - 'httpAgent': defaultToConfig2, - 'httpsAgent': defaultToConfig2, - 'cancelToken': defaultToConfig2, - 'socketPath': defaultToConfig2, - 'responseEncoding': defaultToConfig2, - 'validateStatus': mergeDirectKeys - }; - - utils.forEach(Object.keys(config1).concat(Object.keys(config2)), function computeConfigValue(prop) { - var merge = mergeMap[prop] || mergeDeepProperties; - var configValue = merge(prop); - (utils.isUndefined(configValue) && 
merge !== mergeDirectKeys) || (config[prop] = configValue); - }); - - return config; + return to; }; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); -/***/ }), - -/***/ 3211: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var AxiosError = __nccwpck_require__(2093); +// pkg/dist-src/version.js +var VERSION = "10.4.1"; -/** - * Resolve or reject a Promise based on response status. - * - * @param {Function} resolve A function that resolves the promise. - * @param {Function} reject A function that rejects the promise. - * @param {object} response The response. - */ -module.exports = function settle(resolve, reject, response) { - var validateStatus = response.config.validateStatus; - if (!response.status || !validateStatus || validateStatus(response.status)) { - resolve(response); - } else { - reject(new AxiosError( - 'Request failed with status code ' + response.status, - [AxiosError.ERR_BAD_REQUEST, AxiosError.ERR_BAD_RESPONSE][Math.floor(response.status / 100) - 4], - response.config, - response.request, - response - )); +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE 
/repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET 
/orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + 
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setCustomOidcSubClaimForRepo: [ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + 
listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET 
/marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET 
/repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + 
listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + 
updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST 
/repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + 
deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + oidc: { + getOidcCustomSubTemplateForOrg: [ + "GET /orgs/{org}/actions/oidc/customization/sub" + ], + updateOidcCustomSubTemplateForOrg: [ + "PUT /orgs/{org}/actions/oidc/customization/sub" + ] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + assignTeamToOrgRole: [ + "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + assignUserToOrgRole: [ + "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: 
["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteCustomOrganizationRole: [ + "DELETE /orgs/{org}/organization-roles/{role_id}" + ], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], + listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], + listOrgRoles: ["GET /orgs/{org}/organization-roles"], + listOrganizationFineGrainedPermissions: [ + "GET /orgs/{org}/organization-fine-grained-permissions" + ], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + patchCustomOrganizationRole: [ + "PATCH /orgs/{org}/organization-roles/{role_id}" + ], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE 
/orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], + revokeAllOrgRolesTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" + ], + revokeAllOrgRolesUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}" + ], + revokeOrgRoleTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + revokeOrgRoleUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET 
/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + 
dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ 
+ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + cancelPagesDeployment: [ + "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateCustomPropertiesValues: [ + "PATCH /repos/{owner}/{repo}/properties/values" + ], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], + createPagesSite: ["POST 
/repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST 
/repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesDeployment: [ + "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" + ], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + 
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET 
/repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createFork: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" + ], + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET 
/orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + 
getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] } }; +var endpoints_default = Endpoints; - -/***/ }), - -/***/ 9812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); -var defaults = __nccwpck_require__(1626); - -/** - * Transform the data for a request or a response - * - * @param {Object|String} data The data to be transformed - * @param {Array} headers The headers for the request or response - * @param {Array|Function} fns A single function or Array of functions - * @returns {*} The resulting transformed data - */ -module.exports = function transformData(data, headers, fns) { - var context = this || defaults; - /*eslint no-param-reassign:0*/ - utils.forEach(fns, function transform(fn) { - data = fn.call(context, data, headers); - }); - - return data; -}; - - -/***/ }), - -/***/ 7024: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// eslint-disable-next-line strict -module.exports = __nccwpck_require__(4334); - - -/***/ }), - -/***/ 1626: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); -var normalizeHeaderName = __nccwpck_require__(6240); -var AxiosError = __nccwpck_require__(2093); -var transitionalDefaults = __nccwpck_require__(936); -var toFormData = 
__nccwpck_require__(470); - -var DEFAULT_CONTENT_TYPE = { - 'Content-Type': 'application/x-www-form-urlencoded' -}; - -function setContentTypeIfUnset(headers, value) { - if (!utils.isUndefined(headers) && utils.isUndefined(headers['Content-Type'])) { - headers['Content-Type'] = value; - } -} - -function getDefaultAdapter() { - var adapter; - if (typeof XMLHttpRequest !== 'undefined') { - // For browsers use XHR adapter - adapter = __nccwpck_require__(3454); - } else if (typeof process !== 'undefined' && Object.prototype.toString.call(process) === '[object process]') { - // For node use HTTP adapter - adapter = __nccwpck_require__(8104); - } - return adapter; -} - -function stringifySafely(rawValue, parser, encoder) { - if (utils.isString(rawValue)) { - try { - (parser || JSON.parse)(rawValue); - return utils.trim(rawValue); - } catch (e) { - if (e.name !== 'SyntaxError') { - throw e; - } - } +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); } - - return (encoder || JSON.stringify)(rawValue); } - -var defaults = { - - transitional: transitionalDefaults, - - adapter: getDefaultAdapter(), - - transformRequest: [function transformRequest(data, headers) { - normalizeHeaderName(headers, 'Accept'); - normalizeHeaderName(headers, 'Content-Type'); - - if (utils.isFormData(data) || - utils.isArrayBuffer(data) || - utils.isBuffer(data) || - utils.isStream(data) || - utils.isFile(data) || - utils.isBlob(data) - ) { - return data; - } - if (utils.isArrayBufferView(data)) { - return data.buffer; - } - if (utils.isURLSearchParams(data)) { - setContentTypeIfUnset(headers, 'application/x-www-form-urlencoded;charset=utf-8'); - return data.toString(); - } - - var isObjectPayload = utils.isObject(data); - var contentType = headers && headers['Content-Type']; - - var isFileList; - - if ((isFileList = utils.isFileList(data)) || (isObjectPayload && contentType === 'multipart/form-data')) { - var _FormData = this.env && this.env.FormData; - return toFormData(isFileList ? {'files[]': data} : data, _FormData && new _FormData()); - } else if (isObjectPayload || contentType === 'application/json') { - setContentTypeIfUnset(headers, 'application/json'); - return stringifySafely(data); - } - - return data; - }], - - transformResponse: [function transformResponse(data) { - var transitional = this.transitional || defaults.transitional; - var silentJSONParsing = transitional && transitional.silentJSONParsing; - var forcedJSONParsing = transitional && transitional.forcedJSONParsing; - var strictJSONParsing = !silentJSONParsing && this.responseType === 'json'; - - if (strictJSONParsing || (forcedJSONParsing && utils.isString(data) && data.length)) { - try { - return JSON.parse(data); - } catch (e) { - if (strictJSONParsing) { - if (e.name === 'SyntaxError') { - throw AxiosError.from(e, AxiosError.ERR_BAD_RESPONSE, this, null, this.response); - } - throw e; - } - } - } - - return data; - }], - - /** - * A timeout in milliseconds to abort a request. 
If set to 0 (default) a - * timeout is not created. - */ - timeout: 0, - - xsrfCookieName: 'XSRF-TOKEN', - xsrfHeaderName: 'X-XSRF-TOKEN', - - maxContentLength: -1, - maxBodyLength: -1, - - env: { - FormData: __nccwpck_require__(7024) +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); }, - - validateStatus: function validateStatus(status) { - return status >= 200 && status < 300; + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; }, - - headers: { - common: { - 'Accept': 'application/json, text/plain, */*' + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); } + return cache[methodName]; } }; - -utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) { - defaults.headers[method] = {}; -}); - -utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { - defaults.headers[method] = utils.merge(DEFAULT_CONTENT_TYPE); -}); - -module.exports = defaults; - - -/***/ }), - -/***/ 936: -/***/ ((module) => { - - - -module.exports = { - silentJSONParsing: true, - forcedJSONParsing: true, - clarifyTimeoutError: false -}; - - -/***/ }), - -/***/ 4322: -/***/ ((module) => { - -module.exports = { - "version": "0.27.2" -}; - -/***/ }), - -/***/ 7065: -/***/ ((module) => { - - - -module.exports = function bind(fn, thisArg) { - return function wrap() { - var args = new Array(arguments.length); - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i]; - } - return fn.apply(thisArg, args); - }; -}; - - -/***/ }), - -/***/ 646: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -function encode(val) { - return encodeURIComponent(val). - replace(/%3A/gi, ':'). - replace(/%24/g, '$'). - replace(/%2C/gi, ','). - replace(/%20/g, '+'). - replace(/%5B/gi, '['). 
- replace(/%5D/gi, ']'); -} - -/** - * Build a URL by appending params to the end - * - * @param {string} url The base of the url (e.g., http://www.google.com) - * @param {object} [params] The params to be appended - * @returns {string} The formatted url - */ -module.exports = function buildURL(url, params, paramsSerializer) { - /*eslint no-param-reassign:0*/ - if (!params) { - return url; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); } - - var serializedParams; - if (paramsSerializer) { - serializedParams = paramsSerializer(params); - } else if (utils.isURLSearchParams(params)) { - serializedParams = params.toString(); - } else { - var parts = []; - - utils.forEach(params, function serialize(val, key) { - if (val === null || typeof val === 'undefined') { - return; - } - - if (utils.isArray(val)) { - key = key + '[]'; - } else { - val = [val]; - } - - utils.forEach(val, function parseValue(v) { - if (utils.isDate(v)) { - v = v.toISOString(); - } else if (utils.isObject(v)) { - v = JSON.stringify(v); - } - parts.push(encode(key) + '=' + encode(v)); + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 }); - }); - - serializedParams = parts.join('&'); - } - - if (serializedParams) { - var hashmarkIndex = url.indexOf('#'); - if (hashmarkIndex !== -1) { - url = url.slice(0, hashmarkIndex); + return requestWithDefaults(options); } - - url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams; - } - - return url; -}; - - -/***/ }), - -/***/ 7189: -/***/ ((module) => { - - - -/** - * Creates a new URL by combining the specified URLs - * - * @param {string} baseURL The base URL - * @param {string} relativeURL The relative URL - * @returns {string} The combined URL - */ -module.exports = function combineURLs(baseURL, relativeURL) { - return relativeURL - ? baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '') - : baseURL; -}; - - -/***/ }), - -/***/ 1545: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -module.exports = ( - utils.isStandardBrowserEnv() ? - - // Standard browser envs support document.cookie - (function standardBrowserEnv() { - return { - write: function write(name, value, expires, path, domain, secure) { - var cookie = []; - cookie.push(name + '=' + encodeURIComponent(value)); - - if (utils.isNumber(expires)) { - cookie.push('expires=' + new Date(expires).toGMTString()); - } - - if (utils.isString(path)) { - cookie.push('path=' + path); - } - - if (utils.isString(domain)) { - cookie.push('domain=' + domain); - } - - if (secure === true) { - cookie.push('secure'); - } - - document.cookie = cookie.join('; '); - }, - - read: function read(name) { - var match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); - return (match ? decodeURIComponent(match[3]) : null); - }, - - remove: function remove(name) { - this.write(name, '', Date.now() - 86400000); - } - }; - })() : - - // Non standard browser env (web workers, react-native) lack needed support. 
- (function nonStandardBrowserEnv() { - return { - write: function write() {}, - read: function read() { return null; }, - remove: function remove() {} - }; - })() -); - - -/***/ }), - -/***/ 1301: -/***/ ((module) => { - - - -/** - * Determines whether the specified URL is absolute - * - * @param {string} url The URL to test - * @returns {boolean} True if the specified URL is absolute, otherwise false - */ -module.exports = function isAbsoluteURL(url) { - // A URL is considered absolute if it begins with "://" or "//" (protocol-relative URL). - // RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed - // by any combination of letters, digits, plus, period, or hyphen. - return /^([a-z][a-z\d+\-.]*:)?\/\//i.test(url); -}; - - -/***/ }), - -/***/ 650: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -/** - * Determines whether the payload is an error thrown by Axios - * - * @param {*} payload The value to test - * @returns {boolean} True if the payload is an error thrown by Axios, otherwise false - */ -module.exports = function isAxiosError(payload) { - return utils.isObject(payload) && (payload.isAxiosError === true); -}; - - -/***/ }), - -/***/ 3608: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -module.exports = ( - utils.isStandardBrowserEnv() ? - - // Standard browser envs have full support of the APIs needed to test - // whether the request URL is of the same origin as current location. - (function standardBrowserEnv() { - var msie = /(msie|trident)/i.test(navigator.userAgent); - var urlParsingNode = document.createElement('a'); - var originURL; - - /** - * Parse a URL to discover it's components - * - * @param {String} url The URL to be parsed - * @returns {Object} - */ - function resolveURL(url) { - var href = url; - - if (msie) { - // IE needs attribute set twice to normalize properties - urlParsingNode.setAttribute('href', href); - href = urlParsingNode.href; - } - - urlParsingNode.setAttribute('href', href); - - // urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils - return { - href: urlParsingNode.href, - protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '', - host: urlParsingNode.host, - search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '', - hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '', - hostname: urlParsingNode.hostname, - port: urlParsingNode.port, - pathname: (urlParsingNode.pathname.charAt(0) === '/') ? - urlParsingNode.pathname : - '/' + urlParsingNode.pathname - }; - } - - originURL = resolveURL(window.location.href); - - /** - * Determine if a URL shares the same origin as the current location - * - * @param {String} requestURL The URL to test - * @returns {boolean} True if URL shares the same origin, otherwise false - */ - return function isURLSameOrigin(requestURL) { - var parsed = (utils.isString(requestURL)) ? resolveURL(requestURL) : requestURL; - return (parsed.protocol === originURL.protocol && - parsed.host === originURL.host); - }; - })() : - - // Non standard browser envs (web workers, react-native) lack needed support. 
- (function nonStandardBrowserEnv() { - return function isURLSameOrigin() { - return true; - }; - })() -); - - -/***/ }), - -/***/ 6240: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -module.exports = function normalizeHeaderName(headers, normalizedName) { - utils.forEach(headers, function processHeader(value, name) { - if (name !== normalizedName && name.toUpperCase() === normalizedName.toUpperCase()) { - headers[normalizedName] = value; - delete headers[name]; + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); } - }); -}; - - -/***/ }), - -/***/ 6455: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var utils = __nccwpck_require__(328); - -// Headers whose duplicates are ignored by node -// c.f. https://nodejs.org/api/http.html#http_message_headers -var ignoreDuplicateOf = [ - 'age', 'authorization', 'content-length', 'content-type', 'etag', - 'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since', - 'last-modified', 'location', 'max-forwards', 'proxy-authorization', - 'referer', 'retry-after', 'user-agent' -]; - -/** - * Parse headers into an object - * - * ``` - * Date: Wed, 27 Aug 2014 08:58:49 GMT - * Content-Type: application/json - * Connection: keep-alive - * Transfer-Encoding: chunked - * ``` - * - * @param {String} headers Headers needing to be parsed - * @returns {Object} Headers parsed into an object - */ -module.exports = function parseHeaders(headers) { - var parsed = {}; - var key; - var val; - var i; - - if (!headers) { return parsed; } - - utils.forEach(headers.split('\n'), function parser(line) { - i = line.indexOf(':'); - key = utils.trim(line.substr(0, i)).toLowerCase(); - val = utils.trim(line.substr(i + 1)); - - if (key) { - if (parsed[key] && ignoreDuplicateOf.indexOf(key) >= 0) { - return; - } - if (key === 'set-cookie') { - parsed[key] = (parsed[key] ? parsed[key] : []).concat([val]); - } else { - parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val; + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } } + return requestWithDefaults(options2); } - }); - - return parsed; -}; - - -/***/ }), - -/***/ 6107: -/***/ ((module) => { - - + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} -module.exports = function parseProtocol(url) { - var match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url); - return match && match[1] || ''; -}; +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), -/***/ 4850: -/***/ ((module) => { - +/***/ 6859: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Syntactic sugar for invoking a function and expanding an array for arguments. - * - * Common use case would be to use `Function.prototype.apply`. - * - * ```js - * function f(x, y, z) {} - * var args = [1, 2, 3]; - * f.apply(null, args); - * ``` - * - * With `spread` this example can be re-written. - * - * ```js - * spread(function(x, y, z) {})([1, 2, 3]); - * ``` - * - * @param {Function} callback - * @returns {Function} - */ -module.exports = function spread(callback) { - return function wrap(arr) { - return callback.apply(null, arr); - }; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(2547); +var import_once = __toESM(__nccwpck_require__(4822)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } }; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), -/***/ 470: +/***/ 3612: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -var utils = __nccwpck_require__(328); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(4657); +var import_universal_user_agent = __nccwpck_require__(599); -/** - * Convert a data object to FormData - * @param {Object} obj - * @param {?Object} [formData] - * @returns {Object} - **/ +// pkg/dist-src/version.js +var VERSION = "8.4.0"; -function toFormData(obj, formData) { - // eslint-disable-next-line no-param-reassign - formData = formData || new FormData(); +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} - var stack = []; +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(6859); - function convertValue(value) { - if (value === null) return ''; +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} - if (utils.isDate(value)) { - return value.toISOString(); +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c, _d; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? 
void 0 : _c.redirect, + headers: requestOptions.headers, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; } - - if (utils.isArrayBuffer(value) || utils.isTypedArray(value)) { - return typeof Blob === 'function' ? new Blob([value]) : Buffer.from(value); + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); } - - return value; - } - - function build(data, parentKey) { - if (utils.isPlainObject(data) || utils.isArray(data)) { - if (stack.indexOf(data) !== -1) { - throw Error('Circular reference detected in ' + parentKey); + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; } - - stack.push(data); - - utils.forEach(data, function each(value, key) { - if (utils.isUndefined(value)) return; - var fullKey = parentKey ? parentKey + '.' + key : key; - var arr; - - if (value && !parentKey && typeof value === 'object') { - if (utils.endsWith(key, '{}')) { - // eslint-disable-next-line no-param-reassign - value = JSON.stringify(value); - } else if (utils.endsWith(key, '[]') && (arr = utils.toArray(value))) { - // eslint-disable-next-line func-names - arr.forEach(function(el) { - !utils.isUndefined(el) && formData.append(fullKey, convertValue(el)); - }); - return; - } - } - - build(value, fullKey); + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions }); - - stack.pop(); - } else { - formData.append(parentKey, convertValue(data)); } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + } + return `${data.message}${suffix}`; + } + return `Unknown error: ${JSON.stringify(data)}`; +} - build(obj); - - return formData; +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); } -module.exports = toFormData; +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), -/***/ 1632: +/***/ 9930: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +module.exports = +{ + parallel : __nccwpck_require__(326), + serial : __nccwpck_require__(4295), + serialOrdered : __nccwpck_require__(4482) +}; -var VERSION = (__nccwpck_require__(4322).version); -var AxiosError = __nccwpck_require__(2093); - -var validators = {}; +/***/ }), -// eslint-disable-next-line func-names -['object', 'boolean', 'number', 'function', 'string', 'symbol'].forEach(function(type, i) { - validators[type] = function validator(thing) { - return typeof thing === type || 'a' + (i < 1 ? 
'n ' : ' ') + type; - }; -}); +/***/ 8467: +/***/ ((module) => { -var deprecatedWarnings = {}; +// API +module.exports = abort; /** - * Transitional option validator - * @param {function|boolean?} validator - set to false if the transitional option has been removed - * @param {string?} version - deprecated version / removed since version - * @param {string?} message - some message with additional info - * @returns {function} + * Aborts leftover active jobs + * + * @param {object} state - current state object */ -validators.transitional = function transitional(validator, version, message) { - function formatMessage(opt, desc) { - return '[Axios v' + VERSION + '] Transitional option \'' + opt + '\'' + desc + (message ? '. ' + message : ''); - } - - // eslint-disable-next-line func-names - return function(value, opt, opts) { - if (validator === false) { - throw new AxiosError( - formatMessage(opt, ' has been removed' + (version ? ' in ' + version : '')), - AxiosError.ERR_DEPRECATED - ); - } - - if (version && !deprecatedWarnings[opt]) { - deprecatedWarnings[opt] = true; - // eslint-disable-next-line no-console - console.warn( - formatMessage( - opt, - ' has been deprecated since v' + version + ' and will be removed in the near future' - ) - ); - } +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); - return validator ? validator(value, opt, opts) : true; - }; -}; + // reset leftover jobs + state.jobs = {}; +} /** - * Assert object's properties type - * @param {object} options - * @param {object} schema - * @param {boolean?} allowUnknown + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort */ - -function assertOptions(options, schema, allowUnknown) { - if (typeof options !== 'object') { - throw new AxiosError('options must be an object', AxiosError.ERR_BAD_OPTION_VALUE); - } - var keys = Object.keys(options); - var i = keys.length; - while (i-- > 0) { - var opt = keys[i]; - var validator = schema[opt]; - if (validator) { - var value = options[opt]; - var result = value === undefined || validator(value, opt, options); - if (result !== true) { - throw new AxiosError('option ' + opt + ' must be ' + result, AxiosError.ERR_BAD_OPTION_VALUE); - } - continue; - } - if (allowUnknown !== true) { - throw new AxiosError('Unknown option ' + opt, AxiosError.ERR_BAD_OPTION); - } +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); } } -module.exports = { - assertOptions: assertOptions, - validators: validators -}; - /***/ }), -/***/ 328: +/***/ 8873: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var defer = __nccwpck_require__(9670); - -var bind = __nccwpck_require__(7065); - -// utils is a library of generic helper functions non-specific to axios - -var toString = Object.prototype.toString; - -// eslint-disable-next-line func-names -var kindOf = (function(cache) { - // eslint-disable-next-line func-names - return function(thing) { - var str = toString.call(thing); - return cache[str] || (cache[str] = str.slice(8, -1).toLowerCase()); - }; -})(Object.create(null)); - -function kindOfTest(type) { - type = type.toLowerCase(); - return function isKindOf(thing) { - return kindOf(thing) === type; - }; -} +// API +module.exports = async; /** - * Determine if a value is an Array + * Runs provided callback asynchronously + * even if callback itself is not * - * @param {Object} val The value to test - * @returns {boolean} True if value 
is an Array, otherwise false + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback */ -function isArray(val) { - return Array.isArray(val); -} +function async(callback) +{ + var isAsync = false; -/** - * Determine if a value is undefined - * - * @param {Object} val The value to test - * @returns {boolean} True if the value is undefined, otherwise false - */ -function isUndefined(val) { - return typeof val === 'undefined'; -} + // check if async happened + defer(function() { isAsync = true; }); -/** - * Determine if a value is a Buffer - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Buffer, otherwise false - */ -function isBuffer(val) { - return val !== null && !isUndefined(val) && val.constructor !== null && !isUndefined(val.constructor) - && typeof val.constructor.isBuffer === 'function' && val.constructor.isBuffer(val); + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; } -/** - * Determine if a value is an ArrayBuffer - * - * @function - * @param {Object} val The value to test - * @returns {boolean} True if value is an ArrayBuffer, otherwise false - */ -var isArrayBuffer = kindOfTest('ArrayBuffer'); +/***/ }), -/** - * Determine if a value is a view on an ArrayBuffer - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false - */ -function isArrayBufferView(val) { - var result; - if ((typeof ArrayBuffer !== 'undefined') && (ArrayBuffer.isView)) { - result = ArrayBuffer.isView(val); - } else { - result = (val) && (val.buffer) && (isArrayBuffer(val.buffer)); - } - return result; -} +/***/ 9670: +/***/ ((module) => { -/** - * Determine if a value is a String - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a String, otherwise false - */ -function isString(val) { - return typeof val === 'string'; -} +module.exports = defer; /** - * Determine if a value is a Number + * Runs provided function on next iteration of the event loop * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Number, otherwise false + * @param {function} fn - function to run */ -function isNumber(val) { - return typeof val === 'number'; -} +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? 
process.nextTick + : null + ); -/** - * Determine if a value is an Object - * - * @param {Object} val The value to test - * @returns {boolean} True if value is an Object, otherwise false - */ -function isObject(val) { - return val !== null && typeof val === 'object'; + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } } -/** - * Determine if a value is a plain Object - * - * @param {Object} val The value to test - * @return {boolean} True if value is a plain Object, otherwise false - */ -function isPlainObject(val) { - if (kindOf(val) !== 'object') { - return false; - } - var prototype = Object.getPrototypeOf(val); - return prototype === null || prototype === Object.prototype; -} +/***/ }), -/** - * Determine if a value is a Date - * - * @function - * @param {Object} val The value to test - * @returns {boolean} True if value is a Date, otherwise false - */ -var isDate = kindOfTest('Date'); +/***/ 4413: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Determine if a value is a File - * - * @function - * @param {Object} val The value to test - * @returns {boolean} True if value is a File, otherwise false - */ -var isFile = kindOfTest('File'); +var async = __nccwpck_require__(8873) + , abort = __nccwpck_require__(8467) + ; -/** - * Determine if a value is a Blob - * - * @function - * @param {Object} val The value to test - * @returns {boolean} True if value is a Blob, otherwise false - */ -var isBlob = kindOfTest('Blob'); +// API +module.exports = iterate; /** - * Determine if a value is a FileList + * Iterates over each job object * - * @function - * @param {Object} val The value to test - * @returns {boolean} True if value is a File, otherwise false + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed */ -var isFileList = kindOfTest('FileList'); +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index; -/** - * Determine if a value is a Function - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Function, otherwise false - */ -function isFunction(val) { - return toString.call(val) === '[object Function]'; -} + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } -/** - * Determine if a value is a Stream - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Stream, otherwise false - */ -function isStream(val) { - return isObject(val) && isFunction(val.pipe); + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); } /** - * Determine if a value is a FormData + * Runs iterator over provided job element * - * @param {Object} thing The value to test - * @returns {boolean} True if value is an FormData, otherwise false + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else */ -function isFormData(thing) { - var pattern = '[object FormData]'; - return thing && ( - (typeof FormData === 'function' && thing instanceof FormData) || - toString.call(thing) === pattern || - (isFunction(thing.toString) && thing.toString() === pattern) - ); -} +function runJob(iterator, key, item, callback) +{ + var aborter; -/** - * Determine if a value is a URLSearchParams object - * @function - * @param {Object} val The value to test - * @returns {boolean} True if value is a URLSearchParams object, otherwise false - */ -var isURLSearchParams = kindOfTest('URLSearchParams'); + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } -/** - * Trim excess whitespace off the beginning and end of a string - * - * @param {String} str The String to trim - * @returns {String} The String freed of excess whitespace - */ -function trim(str) { - return str.trim ? str.trim() : str.replace(/^\s+|\s+$/g, ''); + return aborter; } + +/***/ }), + +/***/ 5464: +/***/ ((module) => { + +// API +module.exports = state; + /** - * Determine if we're running in a standard browser environment - * - * This allows axios to run in a web worker, and react-native. - * Both environments support XMLHttpRequest, but not fully standard globals. 
- * - * web workers: - * typeof window -> undefined - * typeof document -> undefined + * Creates initial state object + * for iteration over list * - * react-native: - * navigator.product -> 'ReactNative' - * nativescript - * navigator.product -> 'NativeScript' or 'NS' + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object */ -function isStandardBrowserEnv() { - if (typeof navigator !== 'undefined' && (navigator.product === 'ReactNative' || - navigator.product === 'NativeScript' || - navigator.product === 'NS')) { - return false; +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); } - return ( - typeof window !== 'undefined' && - typeof document !== 'undefined' - ); + + return initState; } + +/***/ }), + +/***/ 447: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var abort = __nccwpck_require__(8467) + , async = __nccwpck_require__(8873) + ; + +// API +module.exports = terminator; + /** - * Iterate over an Array or an Object invoking a function for each item. - * - * If `obj` is an Array callback will be called passing - * the value, index, and complete array for each item. - * - * If 'obj' is an Object callback will be called passing - * the value, key, and complete object for each property. + * Terminates jobs in the attached state context * - * @param {Object|Array} obj The object to iterate - * @param {Function} fn The callback to invoke for each item + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination */ -function forEach(obj, fn) { - // Don't bother if no value provided - if (obj === null || typeof obj === 'undefined') { +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { return; } - // Force an array if not already something iterable - if (typeof obj !== 'object') { - /*eslint no-param-reassign:0*/ - obj = [obj]; - } + // fast forward iteration index + this.index = this.size; - if (isArray(obj)) { - // Iterate over array values - for (var i = 0, l = obj.length; i < l; i++) { - fn.call(null, obj[i], i, obj); - } - } else { - // Iterate over object keys - for (var key in obj) { - if (Object.prototype.hasOwnProperty.call(obj, key)) { - fn.call(null, obj[key], key, obj); - } - } - } + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); } + +/***/ }), + +/***/ 326: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(4413) + , initState = __nccwpck_require__(5464) + , terminator = __nccwpck_require__(447) + ; + +// Public API +module.exports = parallel; + /** - * Accepts varargs expecting each argument to be an object, then - * immutably merges the properties of each object and returns result. - * - * When multiple objects contain the same key the later object in - * the arguments list will take precedence. 
- * - * Example: - * - * ```js - * var result = merge({foo: 123}, {foo: 456}); - * console.log(result.foo); // outputs 456 - * ``` + * Runs iterator over provided array elements in parallel * - * @param {Object} obj1 Object to merge - * @returns {Object} Result of all merge properties + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ -function merge(/* obj1, obj2, obj3, ... */) { - var result = {}; - function assignValue(val, key) { - if (isPlainObject(result[key]) && isPlainObject(val)) { - result[key] = merge(result[key], val); - } else if (isPlainObject(val)) { - result[key] = merge({}, val); - } else if (isArray(val)) { - result[key] = val.slice(); - } else { - result[key] = val; - } - } +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); - for (var i = 0, l = arguments.length; i < l; i++) { - forEach(arguments[i], assignValue); + state.index++; } - return result; -} -/** - * Extends object a by mutably adding to it the properties of object b. - * - * @param {Object} a The object to be extended - * @param {Object} b The object to copy properties from - * @param {Object} thisArg The object to bind function to - * @return {Object} The resulting value of object a - */ -function extend(a, b, thisArg) { - forEach(b, function assignValue(val, key) { - if (thisArg && typeof val === 'function') { - a[key] = bind(val, thisArg); - } else { - a[key] = val; - } - }); - return a; + return terminator.bind(state, callback); } + +/***/ }), + +/***/ 4295: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var serialOrdered = __nccwpck_require__(4482); + +// Public API +module.exports = serial; + /** - * Remove byte order marker. 
This catches EF BB BF (the UTF-8 BOM) + * Runs iterator over provided array elements in series * - * @param {string} content with BOM - * @return {string} content value without BOM + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ -function stripBOM(content) { - if (content.charCodeAt(0) === 0xFEFF) { - content = content.slice(1); - } - return content; +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); } -/** - * Inherit the prototype methods from one constructor into another - * @param {function} constructor - * @param {function} superConstructor - * @param {object} [props] - * @param {object} [descriptors] - */ -function inherits(constructor, superConstructor, props, descriptors) { - constructor.prototype = Object.create(superConstructor.prototype, descriptors); - constructor.prototype.constructor = constructor; - props && Object.assign(constructor.prototype, props); -} +/***/ }), + +/***/ 4482: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(4413) + , initState = __nccwpck_require__(5464) + , terminator = __nccwpck_require__(447) + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; /** - * Resolve object with deep prototype chain to a flat object - * @param {Object} sourceObj source object - * @param {Object} [destObj] - * @param {Function} [filter] - * @returns {Object} + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); -function toFlatObject(sourceObj, destObj, filter) { - var props; - var i; - var prop; - var merged = {}; + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } - destObj = destObj || {}; + state.index++; - do { - props = Object.getOwnPropertyNames(sourceObj); - i = props.length; - while (i-- > 0) { - prop = props[i]; - if (!merged[prop]) { - destObj[prop] = sourceObj[prop]; - merged[prop] = true; - } + // are we there yet? 
+ if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; } - sourceObj = Object.getPrototypeOf(sourceObj); - } while (sourceObj && (!filter || filter(sourceObj, destObj)) && sourceObj !== Object.prototype); - return destObj; + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); } /* - * determines whether a string ends with the characters of a specified string - * @param {String} str - * @param {String} searchString - * @param {Number} [position= 0] - * @returns {boolean} + * -- Sort methods */ -function endsWith(str, searchString, position) { - str = String(str); - if (position === undefined || position > str.length) { - position = str.length; - } - position -= searchString.length; - var lastIndex = str.indexOf(searchString, position); - return lastIndex !== -1 && lastIndex === position; -} - /** - * Returns new array from array like object - * @param {*} [thing] - * @returns {Array} + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result */ -function toArray(thing) { - if (!thing) return null; - var i = thing.length; - if (isUndefined(i)) return null; - var arr = new Array(i); - while (i-- > 0) { - arr[i] = thing[i]; - } - return arr; +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 1 : 0; } -// eslint-disable-next-line func-names -var isTypedArray = (function(TypedArray) { - // eslint-disable-next-line func-names - return function(thing) { - return TypedArray && thing instanceof TypedArray; - }; -})(typeof Uint8Array !== 'undefined' && Object.getPrototypeOf(Uint8Array)); - -module.exports = { - isArray: isArray, - isArrayBuffer: isArrayBuffer, - isBuffer: isBuffer, - isFormData: isFormData, - isArrayBufferView: isArrayBufferView, - isString: isString, - isNumber: isNumber, - isObject: isObject, - isPlainObject: isPlainObject, - isUndefined: isUndefined, - isDate: isDate, - isFile: isFile, - isBlob: isBlob, - isFunction: isFunction, - isStream: isStream, - isURLSearchParams: isURLSearchParams, - isStandardBrowserEnv: isStandardBrowserEnv, - forEach: forEach, - merge: merge, - extend: extend, - trim: trim, - stripBOM: stripBOM, - inherits: inherits, - toFlatObject: toFlatObject, - kindOf: kindOf, - kindOfTest: kindOfTest, - endsWith: endsWith, - toArray: toArray, - isTypedArray: isTypedArray, - isFileList: isFileList -}; +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} /***/ }), -/***/ 9417: +/***/ 2634: /***/ ((module) => { @@ -9081,12 +6266,12 @@ function range(a, b, str) { /***/ }), -/***/ 3682: +/***/ 5596: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var register = __nccwpck_require__(4670); -var addHook = __nccwpck_require__(5549); -var removeHook = __nccwpck_require__(6819); +var register = __nccwpck_require__(6042); +var addHook = __nccwpck_require__(7935); +var removeHook = __nccwpck_require__(2395); // bind with array of arguments: https://stackoverflow.com/a/21792913 var bind = Function.bind; @@ -9149,7 +6334,7 @@ module.exports.Collection = Hook.Collection; /***/ }), -/***/ 5549: +/***/ 7935: /***/ ((module) => { module.exports = addHook; @@ -9202,7 +6387,7 @@ function addHook(state, kind, name, 
hook) { /***/ }), -/***/ 4670: +/***/ 6042: /***/ ((module) => { module.exports = register; @@ -9236,7 +6421,7 @@ function register(state, name, method, options) { /***/ }), -/***/ 6819: +/***/ 2395: /***/ ((module) => { module.exports = removeHook; @@ -9262,7 +6447,7 @@ function removeHook(state, name, method) { /***/ }), -/***/ 1575: +/***/ 1563: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /* module decorator */ module = __nccwpck_require__.nmd(module); @@ -10587,7 +7772,7 @@ var bigInt = (function (undefined) { BigInteger.prototype.toString = function (radix, alphabet) { if (radix === undefined) radix = 10; - if (radix !== 10) return toBaseString(this, radix, alphabet); + if (radix !== 10 || alphabet) return toBaseString(this, radix, alphabet); var v = this.value, l = v.length, str = String(v[--l]), zeros = "0000000", digit; while (--l >= 0) { digit = String(v[l]); @@ -10599,7 +7784,7 @@ var bigInt = (function (undefined) { SmallInteger.prototype.toString = function (radix, alphabet) { if (radix === undefined) radix = 10; - if (radix != 10) return toBaseString(this, radix, alphabet); + if (radix != 10 || alphabet) return toBaseString(this, radix, alphabet); return String(this.value); }; @@ -10723,446 +7908,7 @@ if (typeof define === "function" && define.amd) { /***/ }), -/***/ 6474: -/***/ ((module, exports, __nccwpck_require__) => { - -var Chainsaw = __nccwpck_require__(6533); -var EventEmitter = (__nccwpck_require__(2361).EventEmitter); -var Buffers = __nccwpck_require__(1590); -var Vars = __nccwpck_require__(3755); -var Stream = (__nccwpck_require__(2781).Stream); - -exports = module.exports = function (bufOrEm, eventName) { - if (Buffer.isBuffer(bufOrEm)) { - return exports.parse(bufOrEm); - } - - var s = exports.stream(); - if (bufOrEm && bufOrEm.pipe) { - bufOrEm.pipe(s); - } - else if (bufOrEm) { - bufOrEm.on(eventName || 'data', function (buf) { - s.write(buf); - }); - - bufOrEm.on('end', function () { - s.end(); - }); - } - return s; -}; - -exports.stream = function (input) { - if (input) return exports.apply(null, arguments); - - var pending = null; - function getBytes (bytes, cb, skip) { - pending = { - bytes : bytes, - skip : skip, - cb : function (buf) { - pending = null; - cb(buf); - }, - }; - dispatch(); - } - - var offset = null; - function dispatch () { - if (!pending) { - if (caughtEnd) done = true; - return; - } - if (typeof pending === 'function') { - pending(); - } - else { - var bytes = offset + pending.bytes; - - if (buffers.length >= bytes) { - var buf; - if (offset == null) { - buf = buffers.splice(0, bytes); - if (!pending.skip) { - buf = buf.slice(); - } - } - else { - if (!pending.skip) { - buf = buffers.slice(offset, bytes); - } - offset = bytes; - } - - if (pending.skip) { - pending.cb(); - } - else { - pending.cb(buf); - } - } - } - } - - function builder (saw) { - function next () { if (!done) saw.next() } - - var self = words(function (bytes, cb) { - return function (name) { - getBytes(bytes, function (buf) { - vars.set(name, cb(buf)); - next(); - }); - }; - }); - - self.tap = function (cb) { - saw.nest(cb, vars.store); - }; - - self.into = function (key, cb) { - if (!vars.get(key)) vars.set(key, {}); - var parent = vars; - vars = Vars(parent.get(key)); - - saw.nest(function () { - cb.apply(this, arguments); - this.tap(function () { - vars = parent; - }); - }, vars.store); - }; - - self.flush = function () { - vars.store = {}; - next(); - }; - - self.loop = function (cb) { - var end = false; - - saw.nest(false, function loop () { 
- this.vars = vars.store; - cb.call(this, function () { - end = true; - next(); - }, vars.store); - this.tap(function () { - if (end) saw.next() - else loop.call(this) - }.bind(this)); - }, vars.store); - }; - - self.buffer = function (name, bytes) { - if (typeof bytes === 'string') { - bytes = vars.get(bytes); - } - - getBytes(bytes, function (buf) { - vars.set(name, buf); - next(); - }); - }; - - self.skip = function (bytes) { - if (typeof bytes === 'string') { - bytes = vars.get(bytes); - } - - getBytes(bytes, function () { - next(); - }); - }; - - self.scan = function find (name, search) { - if (typeof search === 'string') { - search = new Buffer(search); - } - else if (!Buffer.isBuffer(search)) { - throw new Error('search must be a Buffer or a string'); - } - - var taken = 0; - pending = function () { - var pos = buffers.indexOf(search, offset + taken); - var i = pos-offset-taken; - if (pos !== -1) { - pending = null; - if (offset != null) { - vars.set( - name, - buffers.slice(offset, offset + taken + i) - ); - offset += taken + i + search.length; - } - else { - vars.set( - name, - buffers.slice(0, taken + i) - ); - buffers.splice(0, taken + i + search.length); - } - next(); - dispatch(); - } else { - i = Math.max(buffers.length - search.length - offset - taken, 0); - } - taken += i; - }; - dispatch(); - }; - - self.peek = function (cb) { - offset = 0; - saw.nest(function () { - cb.call(this, vars.store); - this.tap(function () { - offset = null; - }); - }); - }; - - return self; - }; - - var stream = Chainsaw.light(builder); - stream.writable = true; - - var buffers = Buffers(); - - stream.write = function (buf) { - buffers.push(buf); - dispatch(); - }; - - var vars = Vars(); - - var done = false, caughtEnd = false; - stream.end = function () { - caughtEnd = true; - }; - - stream.pipe = Stream.prototype.pipe; - Object.getOwnPropertyNames(EventEmitter.prototype).forEach(function (name) { - stream[name] = EventEmitter.prototype[name]; - }); - - return stream; -}; - -exports.parse = function parse (buffer) { - var self = words(function (bytes, cb) { - return function (name) { - if (offset + bytes <= buffer.length) { - var buf = buffer.slice(offset, offset + bytes); - offset += bytes; - vars.set(name, cb(buf)); - } - else { - vars.set(name, null); - } - return self; - }; - }); - - var offset = 0; - var vars = Vars(); - self.vars = vars.store; - - self.tap = function (cb) { - cb.call(self, vars.store); - return self; - }; - - self.into = function (key, cb) { - if (!vars.get(key)) { - vars.set(key, {}); - } - var parent = vars; - vars = Vars(parent.get(key)); - cb.call(self, vars.store); - vars = parent; - return self; - }; - - self.loop = function (cb) { - var end = false; - var ender = function () { end = true }; - while (end === false) { - cb.call(self, ender, vars.store); - } - return self; - }; - - self.buffer = function (name, size) { - if (typeof size === 'string') { - size = vars.get(size); - } - var buf = buffer.slice(offset, Math.min(buffer.length, offset + size)); - offset += size; - vars.set(name, buf); - - return self; - }; - - self.skip = function (bytes) { - if (typeof bytes === 'string') { - bytes = vars.get(bytes); - } - offset += bytes; - - return self; - }; - - self.scan = function (name, search) { - if (typeof search === 'string') { - search = new Buffer(search); - } - else if (!Buffer.isBuffer(search)) { - throw new Error('search must be a Buffer or a string'); - } - vars.set(name, null); - - // simple but slow string search - for (var i = 0; i + offset <= 
buffer.length - search.length + 1; i++) { - for ( - var j = 0; - j < search.length && buffer[offset+i+j] === search[j]; - j++ - ); - if (j === search.length) break; - } - - vars.set(name, buffer.slice(offset, offset + i)); - offset += i + search.length; - return self; - }; - - self.peek = function (cb) { - var was = offset; - cb.call(self, vars.store); - offset = was; - return self; - }; - - self.flush = function () { - vars.store = {}; - return self; - }; - - self.eof = function () { - return offset >= buffer.length; - }; - - return self; -}; - -// convert byte strings to unsigned little endian numbers -function decodeLEu (bytes) { - var acc = 0; - for (var i = 0; i < bytes.length; i++) { - acc += Math.pow(256,i) * bytes[i]; - } - return acc; -} - -// convert byte strings to unsigned big endian numbers -function decodeBEu (bytes) { - var acc = 0; - for (var i = 0; i < bytes.length; i++) { - acc += Math.pow(256, bytes.length - i - 1) * bytes[i]; - } - return acc; -} - -// convert byte strings to signed big endian numbers -function decodeBEs (bytes) { - var val = decodeBEu(bytes); - if ((bytes[0] & 0x80) == 0x80) { - val -= Math.pow(256, bytes.length); - } - return val; -} - -// convert byte strings to signed little endian numbers -function decodeLEs (bytes) { - var val = decodeLEu(bytes); - if ((bytes[bytes.length - 1] & 0x80) == 0x80) { - val -= Math.pow(256, bytes.length); - } - return val; -} - -function words (decode) { - var self = {}; - - [ 1, 2, 4, 8 ].forEach(function (bytes) { - var bits = bytes * 8; - - self['word' + bits + 'le'] - = self['word' + bits + 'lu'] - = decode(bytes, decodeLEu); - - self['word' + bits + 'ls'] - = decode(bytes, decodeLEs); - - self['word' + bits + 'be'] - = self['word' + bits + 'bu'] - = decode(bytes, decodeBEu); - - self['word' + bits + 'bs'] - = decode(bytes, decodeBEs); - }); - - // word8be(n) == word8le(n) for all n - self.word8 = self.word8u = self.word8be; - self.word8s = self.word8bs; - - return self; -} - - -/***/ }), - -/***/ 3755: -/***/ ((module) => { - -module.exports = function (store) { - function getset (name, value) { - var node = vars.store; - var keys = name.split('.'); - keys.slice(0,-1).forEach(function (k) { - if (node[k] === undefined) node[k] = {}; - node = node[k] - }); - var key = keys[keys.length - 1]; - if (arguments.length == 1) { - return node[key]; - } - else { - return node[key] = value; - } - } - - var vars = { - get : function (name) { - return getset(name); - }, - set : function (name, value) { - return getset(name, value); - }, - store : store || {}, - }; - return vars; -}; - - -/***/ }), - -/***/ 5490: +/***/ 6201: /***/ ((module) => { @@ -11190,15 +7936,15 @@ Promise.prototype.any = function () { /***/ }), -/***/ 8061: +/***/ 3379: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var firstLineError; try {throw new Error(); } catch (e) {firstLineError = e;} -var schedule = __nccwpck_require__(6203); -var Queue = __nccwpck_require__(878); -var util = __nccwpck_require__(7448); +var schedule = __nccwpck_require__(8236); +var Queue = __nccwpck_require__(956); +var util = __nccwpck_require__(2587); function Async() { this._customScheduler = false; @@ -11358,7 +8104,7 @@ module.exports.firstLineError = firstLineError; /***/ }), -/***/ 3767: +/***/ 276: /***/ ((module) => { @@ -11432,7 +8178,7 @@ Promise.bind = function (thisArg, value) { /***/ }), -/***/ 8710: +/***/ 3513: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { @@ -11443,14 +8189,14 @@ function noConflict() { catch (e) {} 
return bluebird; } -var bluebird = __nccwpck_require__(3694)(); +var bluebird = __nccwpck_require__(5810)(); bluebird.noConflict = noConflict; module.exports = bluebird; /***/ }), -/***/ 924: +/***/ 367: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { @@ -11462,7 +8208,7 @@ if (cr) { } module.exports = function(Promise) { -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var canEvaluate = util.canEvaluate; var isIdentifier = util.isIdentifier; @@ -11580,12 +8326,12 @@ Promise.prototype.get = function (propertyName) { /***/ }), -/***/ 6616: +/***/ 8087: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function(Promise, PromiseArray, apiRejection, debug) { -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var tryCatch = util.tryCatch; var errorObj = util.errorObj; var async = Promise._async; @@ -11716,13 +8462,13 @@ Promise.prototype._resultCancelled = function() { /***/ }), -/***/ 8985: +/***/ 2704: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function(NEXT_FILTER) { -var util = __nccwpck_require__(7448); -var getKeys = (__nccwpck_require__(3062).keys); +var util = __nccwpck_require__(2587); +var getKeys = (__nccwpck_require__(9451).keys); var tryCatch = util.tryCatch; var errorObj = util.errorObj; @@ -11765,7 +8511,7 @@ return catchFilter; /***/ }), -/***/ 5422: +/***/ 2661: /***/ ((module) => { @@ -11841,15 +8587,15 @@ return Context; /***/ }), -/***/ 6004: +/***/ 699: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function(Promise, Context) { var getDomain = Promise._getDomain; var async = Promise._async; -var Warning = (__nccwpck_require__(5816).Warning); -var util = __nccwpck_require__(7448); +var Warning = (__nccwpck_require__(5514).Warning); +var util = __nccwpck_require__(2587); var canAttachTrace = util.canAttachTrace; var unhandledRejectionHandled; var possiblyUnhandledRejection; @@ -12764,7 +9510,7 @@ return { /***/ }), -/***/ 8277: +/***/ 3781: /***/ ((module) => { @@ -12817,7 +9563,7 @@ Promise.prototype.catchReturn = function (value) { /***/ }), -/***/ 838: +/***/ 8706: /***/ ((module) => { @@ -12854,13 +9600,13 @@ Promise.mapSeries = PromiseMapSeries; /***/ }), -/***/ 5816: +/***/ 5514: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var es5 = __nccwpck_require__(3062); +var es5 = __nccwpck_require__(9451); var Objectfreeze = es5.freeze; -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var inherits = util.inherits; var notEnumerableProp = util.notEnumerableProp; @@ -12977,7 +9723,7 @@ module.exports = { /***/ }), -/***/ 3062: +/***/ 9451: /***/ ((module) => { var isES5 = (function(){ @@ -13064,7 +9810,7 @@ if (isES5) { /***/ }), -/***/ 2223: +/***/ 6630: /***/ ((module) => { @@ -13083,12 +9829,12 @@ Promise.filter = function (promises, fn, options) { /***/ }), -/***/ 7304: +/***/ 3254: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function(Promise, tryConvertToPromise) { -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var CancellationError = Promise.CancellationError; var errorObj = util.errorObj; @@ -13201,7 +9947,7 @@ return PassThroughHandlerContext; /***/ }), -/***/ 8619: +/***/ 60: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { @@ -13211,9 +9957,9 @@ module.exports = function(Promise, tryConvertToPromise, Proxyable, debug) { -var errors = 
__nccwpck_require__(5816); +var errors = __nccwpck_require__(5514); var TypeError = errors.TypeError; -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var errorObj = util.errorObj; var tryCatch = util.tryCatch; var yieldHandlers = []; @@ -13431,14 +10177,14 @@ Promise.spawn = function (generatorFunction) { /***/ }), -/***/ 5248: +/***/ 6188: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function(Promise, PromiseArray, tryConvertToPromise, INTERNAL, async, getDomain) { -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var canEvaluate = util.canEvaluate; var tryCatch = util.tryCatch; var errorObj = util.errorObj; @@ -13606,7 +10352,7 @@ Promise.join = function () { /***/ }), -/***/ 8150: +/***/ 7908: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { @@ -13617,7 +10363,7 @@ module.exports = function(Promise, INTERNAL, debug) { var getDomain = Promise._getDomain; -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var tryCatch = util.tryCatch; var errorObj = util.errorObj; var async = Promise._async; @@ -13781,13 +10527,13 @@ Promise.map = function (promises, fn, options, _filter) { /***/ }), -/***/ 7415: +/***/ 1610: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function(Promise, INTERNAL, tryConvertToPromise, apiRejection, debug) { -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var tryCatch = util.tryCatch; Promise.method = function (fn) { @@ -13843,15 +10589,15 @@ Promise.prototype._resolveFromSyncValue = function (value) { /***/ }), -/***/ 4315: +/***/ 4696: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var maybeWrapAsError = util.maybeWrapAsError; -var errors = __nccwpck_require__(5816); +var errors = __nccwpck_require__(5514); var OperationalError = errors.OperationalError; -var es5 = __nccwpck_require__(3062); +var es5 = __nccwpck_require__(9451); function isUntypedError(obj) { return obj instanceof Error && @@ -13901,12 +10647,12 @@ module.exports = nodebackForPromise; /***/ }), -/***/ 5447: +/***/ 8936: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function(Promise) { -var util = __nccwpck_require__(7448); +var util = __nccwpck_require__(2587); var async = Promise._async; var tryCatch = util.tryCatch; var errorObj = util.errorObj; @@ -13953,2320 +10699,1565 @@ Promise.prototype.asCallback = Promise.prototype.nodeify = function (nodeback, } this._then( adapter, - errorAdapter, - undefined, - this, - nodeback - ); - } - return this; -}; -}; - - -/***/ }), - -/***/ 3694: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -module.exports = function() { -var makeSelfResolutionError = function () { - return new TypeError("circular promise resolution chain\u000a\u000a See http://goo.gl/MqrFmX\u000a"); -}; -var reflectHandler = function() { - return new Promise.PromiseInspection(this._target()); -}; -var apiRejection = function(msg) { - return Promise.reject(new TypeError(msg)); -}; -function Proxyable() {} -var UNDEFINED_BINDING = {}; -var util = __nccwpck_require__(7448); - -var getDomain; -if (util.isNode) { - getDomain = function() { - var ret = process.domain; - if (ret === undefined) ret = null; - return ret; - }; -} else { - getDomain = function() { - return null; - }; -} -util.notEnumerableProp(Promise, "_getDomain", 
getDomain); - -var es5 = __nccwpck_require__(3062); -var Async = __nccwpck_require__(8061); -var async = new Async(); -es5.defineProperty(Promise, "_async", {value: async}); -var errors = __nccwpck_require__(5816); -var TypeError = Promise.TypeError = errors.TypeError; -Promise.RangeError = errors.RangeError; -var CancellationError = Promise.CancellationError = errors.CancellationError; -Promise.TimeoutError = errors.TimeoutError; -Promise.OperationalError = errors.OperationalError; -Promise.RejectionError = errors.OperationalError; -Promise.AggregateError = errors.AggregateError; -var INTERNAL = function(){}; -var APPLY = {}; -var NEXT_FILTER = {}; -var tryConvertToPromise = __nccwpck_require__(9787)(Promise, INTERNAL); -var PromiseArray = - __nccwpck_require__(5307)(Promise, INTERNAL, - tryConvertToPromise, apiRejection, Proxyable); -var Context = __nccwpck_require__(5422)(Promise); - /*jshint unused:false*/ -var createContext = Context.create; -var debug = __nccwpck_require__(6004)(Promise, Context); -var CapturedTrace = debug.CapturedTrace; -var PassThroughHandlerContext = - __nccwpck_require__(7304)(Promise, tryConvertToPromise); -var catchFilter = __nccwpck_require__(8985)(NEXT_FILTER); -var nodebackForPromise = __nccwpck_require__(4315); -var errorObj = util.errorObj; -var tryCatch = util.tryCatch; -function check(self, executor) { - if (typeof executor !== "function") { - throw new TypeError("expecting a function but got " + util.classString(executor)); - } - if (self.constructor !== Promise) { - throw new TypeError("the promise constructor cannot be invoked directly\u000a\u000a See http://goo.gl/MqrFmX\u000a"); - } -} - -function Promise(executor) { - this._bitField = 0; - this._fulfillmentHandler0 = undefined; - this._rejectionHandler0 = undefined; - this._promise0 = undefined; - this._receiver0 = undefined; - if (executor !== INTERNAL) { - check(this, executor); - this._resolveFromExecutor(executor); - } - this._promiseCreated(); - this._fireEvent("promiseCreated", this); -} - -Promise.prototype.toString = function () { - return "[object Promise]"; -}; - -Promise.prototype.caught = Promise.prototype["catch"] = function (fn) { - var len = arguments.length; - if (len > 1) { - var catchInstances = new Array(len - 1), - j = 0, i; - for (i = 0; i < len - 1; ++i) { - var item = arguments[i]; - if (util.isObject(item)) { - catchInstances[j++] = item; - } else { - return apiRejection("expecting an object but got " + - "A catch statement predicate " + util.classString(item)); - } - } - catchInstances.length = j; - fn = arguments[i]; - return this.then(undefined, catchFilter(catchInstances, fn, this)); - } - return this.then(undefined, fn); -}; - -Promise.prototype.reflect = function () { - return this._then(reflectHandler, - reflectHandler, undefined, this, undefined); -}; - -Promise.prototype.then = function (didFulfill, didReject) { - if (debug.warnings() && arguments.length > 0 && - typeof didFulfill !== "function" && - typeof didReject !== "function") { - var msg = ".then() only accepts functions but was passed: " + - util.classString(didFulfill); - if (arguments.length > 1) { - msg += ", " + util.classString(didReject); - } - this._warn(msg); - } - return this._then(didFulfill, didReject, undefined, undefined, undefined); -}; - -Promise.prototype.done = function (didFulfill, didReject) { - var promise = - this._then(didFulfill, didReject, undefined, undefined, undefined); - promise._setIsFinal(); -}; - -Promise.prototype.spread = function (fn) { - if (typeof fn !== "function") { - 
return apiRejection("expecting a function but got " + util.classString(fn)); - } - return this.all()._then(fn, undefined, undefined, APPLY, undefined); -}; - -Promise.prototype.toJSON = function () { - var ret = { - isFulfilled: false, - isRejected: false, - fulfillmentValue: undefined, - rejectionReason: undefined - }; - if (this.isFulfilled()) { - ret.fulfillmentValue = this.value(); - ret.isFulfilled = true; - } else if (this.isRejected()) { - ret.rejectionReason = this.reason(); - ret.isRejected = true; - } - return ret; -}; - -Promise.prototype.all = function () { - if (arguments.length > 0) { - this._warn(".all() was passed arguments but it does not take any"); - } - return new PromiseArray(this).promise(); -}; - -Promise.prototype.error = function (fn) { - return this.caught(util.originatesFromRejection, fn); -}; - -Promise.getNewLibraryCopy = module.exports; - -Promise.is = function (val) { - return val instanceof Promise; -}; - -Promise.fromNode = Promise.fromCallback = function(fn) { - var ret = new Promise(INTERNAL); - ret._captureStackTrace(); - var multiArgs = arguments.length > 1 ? !!Object(arguments[1]).multiArgs - : false; - var result = tryCatch(fn)(nodebackForPromise(ret, multiArgs)); - if (result === errorObj) { - ret._rejectCallback(result.e, true); - } - if (!ret._isFateSealed()) ret._setAsyncGuaranteed(); - return ret; -}; - -Promise.all = function (promises) { - return new PromiseArray(promises).promise(); -}; - -Promise.cast = function (obj) { - var ret = tryConvertToPromise(obj); - if (!(ret instanceof Promise)) { - ret = new Promise(INTERNAL); - ret._captureStackTrace(); - ret._setFulfilled(); - ret._rejectionHandler0 = obj; - } - return ret; -}; - -Promise.resolve = Promise.fulfilled = Promise.cast; - -Promise.reject = Promise.rejected = function (reason) { - var ret = new Promise(INTERNAL); - ret._captureStackTrace(); - ret._rejectCallback(reason, true); - return ret; -}; - -Promise.setScheduler = function(fn) { - if (typeof fn !== "function") { - throw new TypeError("expecting a function but got " + util.classString(fn)); - } - return async.setScheduler(fn); -}; - -Promise.prototype._then = function ( - didFulfill, - didReject, - _, receiver, - internalData -) { - var haveInternalData = internalData !== undefined; - var promise = haveInternalData ? internalData : new Promise(INTERNAL); - var target = this._target(); - var bitField = target._bitField; - - if (!haveInternalData) { - promise._propagateFrom(this, 3); - promise._captureStackTrace(); - if (receiver === undefined && - ((this._bitField & 2097152) !== 0)) { - if (!((bitField & 50397184) === 0)) { - receiver = this._boundValue(); - } else { - receiver = target === this ? undefined : this._boundTo; - } - } - this._fireEvent("promiseChained", this, promise); - } - - var domain = getDomain(); - if (!((bitField & 50397184) === 0)) { - var handler, value, settler = target._settlePromiseCtx; - if (((bitField & 33554432) !== 0)) { - value = target._rejectionHandler0; - handler = didFulfill; - } else if (((bitField & 16777216) !== 0)) { - value = target._fulfillmentHandler0; - handler = didReject; - target._unsetRejectionIsUnhandled(); - } else { - settler = target._settlePromiseLateCancellationObserver; - value = new CancellationError("late cancellation observer"); - target._attachExtraTrace(value); - handler = didReject; - } - - async.invoke(settler, target, { - handler: domain === null ? 
handler - : (typeof handler === "function" && - util.domainBind(domain, handler)), - promise: promise, - receiver: receiver, - value: value - }); - } else { - target._addCallbacks(didFulfill, didReject, promise, receiver, domain); - } - - return promise; -}; - -Promise.prototype._length = function () { - return this._bitField & 65535; -}; - -Promise.prototype._isFateSealed = function () { - return (this._bitField & 117506048) !== 0; -}; - -Promise.prototype._isFollowing = function () { - return (this._bitField & 67108864) === 67108864; -}; - -Promise.prototype._setLength = function (len) { - this._bitField = (this._bitField & -65536) | - (len & 65535); -}; - -Promise.prototype._setFulfilled = function () { - this._bitField = this._bitField | 33554432; - this._fireEvent("promiseFulfilled", this); -}; - -Promise.prototype._setRejected = function () { - this._bitField = this._bitField | 16777216; - this._fireEvent("promiseRejected", this); -}; - -Promise.prototype._setFollowing = function () { - this._bitField = this._bitField | 67108864; - this._fireEvent("promiseResolved", this); -}; - -Promise.prototype._setIsFinal = function () { - this._bitField = this._bitField | 4194304; -}; - -Promise.prototype._isFinal = function () { - return (this._bitField & 4194304) > 0; -}; - -Promise.prototype._unsetCancelled = function() { - this._bitField = this._bitField & (~65536); -}; - -Promise.prototype._setCancelled = function() { - this._bitField = this._bitField | 65536; - this._fireEvent("promiseCancelled", this); -}; - -Promise.prototype._setWillBeCancelled = function() { - this._bitField = this._bitField | 8388608; -}; - -Promise.prototype._setAsyncGuaranteed = function() { - if (async.hasCustomScheduler()) return; - this._bitField = this._bitField | 134217728; -}; - -Promise.prototype._receiverAt = function (index) { - var ret = index === 0 ? 
this._receiver0 : this[ - index * 4 - 4 + 3]; - if (ret === UNDEFINED_BINDING) { - return undefined; - } else if (ret === undefined && this._isBound()) { - return this._boundValue(); - } - return ret; -}; - -Promise.prototype._promiseAt = function (index) { - return this[ - index * 4 - 4 + 2]; -}; - -Promise.prototype._fulfillmentHandlerAt = function (index) { - return this[ - index * 4 - 4 + 0]; -}; - -Promise.prototype._rejectionHandlerAt = function (index) { - return this[ - index * 4 - 4 + 1]; -}; - -Promise.prototype._boundValue = function() {}; - -Promise.prototype._migrateCallback0 = function (follower) { - var bitField = follower._bitField; - var fulfill = follower._fulfillmentHandler0; - var reject = follower._rejectionHandler0; - var promise = follower._promise0; - var receiver = follower._receiverAt(0); - if (receiver === undefined) receiver = UNDEFINED_BINDING; - this._addCallbacks(fulfill, reject, promise, receiver, null); -}; - -Promise.prototype._migrateCallbackAt = function (follower, index) { - var fulfill = follower._fulfillmentHandlerAt(index); - var reject = follower._rejectionHandlerAt(index); - var promise = follower._promiseAt(index); - var receiver = follower._receiverAt(index); - if (receiver === undefined) receiver = UNDEFINED_BINDING; - this._addCallbacks(fulfill, reject, promise, receiver, null); -}; - -Promise.prototype._addCallbacks = function ( - fulfill, - reject, - promise, - receiver, - domain -) { - var index = this._length(); - - if (index >= 65535 - 4) { - index = 0; - this._setLength(0); - } - - if (index === 0) { - this._promise0 = promise; - this._receiver0 = receiver; - if (typeof fulfill === "function") { - this._fulfillmentHandler0 = - domain === null ? fulfill : util.domainBind(domain, fulfill); - } - if (typeof reject === "function") { - this._rejectionHandler0 = - domain === null ? reject : util.domainBind(domain, reject); - } - } else { - var base = index * 4 - 4; - this[base + 2] = promise; - this[base + 3] = receiver; - if (typeof fulfill === "function") { - this[base + 0] = - domain === null ? fulfill : util.domainBind(domain, fulfill); - } - if (typeof reject === "function") { - this[base + 1] = - domain === null ? 
reject : util.domainBind(domain, reject); - } - } - this._setLength(index + 1); - return index; -}; - -Promise.prototype._proxy = function (proxyable, arg) { - this._addCallbacks(undefined, undefined, arg, proxyable, null); -}; - -Promise.prototype._resolveCallback = function(value, shouldBind) { - if (((this._bitField & 117506048) !== 0)) return; - if (value === this) - return this._rejectCallback(makeSelfResolutionError(), false); - var maybePromise = tryConvertToPromise(value, this); - if (!(maybePromise instanceof Promise)) return this._fulfill(value); - - if (shouldBind) this._propagateFrom(maybePromise, 2); - - var promise = maybePromise._target(); - - if (promise === this) { - this._reject(makeSelfResolutionError()); - return; - } - - var bitField = promise._bitField; - if (((bitField & 50397184) === 0)) { - var len = this._length(); - if (len > 0) promise._migrateCallback0(this); - for (var i = 1; i < len; ++i) { - promise._migrateCallbackAt(this, i); - } - this._setFollowing(); - this._setLength(0); - this._setFollowee(promise); - } else if (((bitField & 33554432) !== 0)) { - this._fulfill(promise._value()); - } else if (((bitField & 16777216) !== 0)) { - this._reject(promise._reason()); - } else { - var reason = new CancellationError("late cancellation observer"); - promise._attachExtraTrace(reason); - this._reject(reason); + errorAdapter, + undefined, + this, + nodeback + ); } + return this; }; - -Promise.prototype._rejectCallback = -function(reason, synchronous, ignoreNonErrorWarnings) { - var trace = util.ensureErrorObject(reason); - var hasStack = trace === reason; - if (!hasStack && !ignoreNonErrorWarnings && debug.warnings()) { - var message = "a promise was rejected with a non-error: " + - util.classString(reason); - this._warn(message, true); - } - this._attachExtraTrace(trace, synchronous ? 
hasStack : false); - this._reject(reason); }; -Promise.prototype._resolveFromExecutor = function (executor) { - var promise = this; - this._captureStackTrace(); - this._pushContext(); - var synchronous = true; - var r = this._execute(executor, function(value) { - promise._resolveCallback(value); - }, function (reason) { - promise._rejectCallback(reason, synchronous); - }); - synchronous = false; - this._popContext(); - if (r !== undefined) { - promise._rejectCallback(r, true); - } -}; +/***/ }), -Promise.prototype._settlePromiseFromHandler = function ( - handler, receiver, value, promise -) { - var bitField = promise._bitField; - if (((bitField & 65536) !== 0)) return; - promise._pushContext(); - var x; - if (receiver === APPLY) { - if (!value || typeof value.length !== "number") { - x = errorObj; - x.e = new TypeError("cannot .spread() a non-array: " + - util.classString(value)); - } else { - x = tryCatch(handler).apply(this._boundValue(), value); - } - } else { - x = tryCatch(handler).call(receiver, value); - } - var promiseCreated = promise._popContext(); - bitField = promise._bitField; - if (((bitField & 65536) !== 0)) return; +/***/ 5810: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (x === NEXT_FILTER) { - promise._reject(value); - } else if (x === errorObj) { - promise._rejectCallback(x.e, false); - } else { - debug.checkForgottenReturns(x, promiseCreated, "", promise, this); - promise._resolveCallback(x); - } -}; -Promise.prototype._target = function() { - var ret = this; - while (ret._isFollowing()) ret = ret._followee(); - return ret; +module.exports = function() { +var makeSelfResolutionError = function () { + return new TypeError("circular promise resolution chain\u000a\u000a See http://goo.gl/MqrFmX\u000a"); }; - -Promise.prototype._followee = function() { - return this._rejectionHandler0; +var reflectHandler = function() { + return new Promise.PromiseInspection(this._target()); }; - -Promise.prototype._setFollowee = function(promise) { - this._rejectionHandler0 = promise; +var apiRejection = function(msg) { + return Promise.reject(new TypeError(msg)); }; +function Proxyable() {} +var UNDEFINED_BINDING = {}; +var util = __nccwpck_require__(2587); -Promise.prototype._settlePromise = function(promise, handler, receiver, value) { - var isPromise = promise instanceof Promise; - var bitField = this._bitField; - var asyncGuaranteed = ((bitField & 134217728) !== 0); - if (((bitField & 65536) !== 0)) { - if (isPromise) promise._invokeInternalOnCancel(); +var getDomain; +if (util.isNode) { + getDomain = function() { + var ret = process.domain; + if (ret === undefined) ret = null; + return ret; + }; +} else { + getDomain = function() { + return null; + }; +} +util.notEnumerableProp(Promise, "_getDomain", getDomain); - if (receiver instanceof PassThroughHandlerContext && - receiver.isFinallyHandler()) { - receiver.cancelPromise = promise; - if (tryCatch(handler).call(receiver, value) === errorObj) { - promise._reject(errorObj.e); - } - } else if (handler === reflectHandler) { - promise._fulfill(reflectHandler.call(receiver)); - } else if (receiver instanceof Proxyable) { - receiver._promiseCancelled(promise); - } else if (isPromise || promise instanceof PromiseArray) { - promise._cancel(); - } else { - receiver.cancel(); - } - } else if (typeof handler === "function") { - if (!isPromise) { - handler.call(receiver, value, promise); - } else { - if (asyncGuaranteed) promise._setAsyncGuaranteed(); - this._settlePromiseFromHandler(handler, receiver, value, 
promise); - } - } else if (receiver instanceof Proxyable) { - if (!receiver._isResolved()) { - if (((bitField & 33554432) !== 0)) { - receiver._promiseFulfilled(value, promise); - } else { - receiver._promiseRejected(value, promise); - } - } - } else if (isPromise) { - if (asyncGuaranteed) promise._setAsyncGuaranteed(); - if (((bitField & 33554432) !== 0)) { - promise._fulfill(value); - } else { - promise._reject(value); - } +var es5 = __nccwpck_require__(9451); +var Async = __nccwpck_require__(3379); +var async = new Async(); +es5.defineProperty(Promise, "_async", {value: async}); +var errors = __nccwpck_require__(5514); +var TypeError = Promise.TypeError = errors.TypeError; +Promise.RangeError = errors.RangeError; +var CancellationError = Promise.CancellationError = errors.CancellationError; +Promise.TimeoutError = errors.TimeoutError; +Promise.OperationalError = errors.OperationalError; +Promise.RejectionError = errors.OperationalError; +Promise.AggregateError = errors.AggregateError; +var INTERNAL = function(){}; +var APPLY = {}; +var NEXT_FILTER = {}; +var tryConvertToPromise = __nccwpck_require__(3947)(Promise, INTERNAL); +var PromiseArray = + __nccwpck_require__(7151)(Promise, INTERNAL, + tryConvertToPromise, apiRejection, Proxyable); +var Context = __nccwpck_require__(2661)(Promise); + /*jshint unused:false*/ +var createContext = Context.create; +var debug = __nccwpck_require__(699)(Promise, Context); +var CapturedTrace = debug.CapturedTrace; +var PassThroughHandlerContext = + __nccwpck_require__(3254)(Promise, tryConvertToPromise); +var catchFilter = __nccwpck_require__(2704)(NEXT_FILTER); +var nodebackForPromise = __nccwpck_require__(4696); +var errorObj = util.errorObj; +var tryCatch = util.tryCatch; +function check(self, executor) { + if (typeof executor !== "function") { + throw new TypeError("expecting a function but got " + util.classString(executor)); } -}; - -Promise.prototype._settlePromiseLateCancellationObserver = function(ctx) { - var handler = ctx.handler; - var promise = ctx.promise; - var receiver = ctx.receiver; - var value = ctx.value; - if (typeof handler === "function") { - if (!(promise instanceof Promise)) { - handler.call(receiver, value, promise); - } else { - this._settlePromiseFromHandler(handler, receiver, value, promise); - } - } else if (promise instanceof Promise) { - promise._reject(value); + if (self.constructor !== Promise) { + throw new TypeError("the promise constructor cannot be invoked directly\u000a\u000a See http://goo.gl/MqrFmX\u000a"); } -}; - -Promise.prototype._settlePromiseCtx = function(ctx) { - this._settlePromise(ctx.promise, ctx.handler, ctx.receiver, ctx.value); -}; +} -Promise.prototype._settlePromise0 = function(handler, value, bitField) { - var promise = this._promise0; - var receiver = this._receiverAt(0); +function Promise(executor) { + this._bitField = 0; + this._fulfillmentHandler0 = undefined; + this._rejectionHandler0 = undefined; this._promise0 = undefined; this._receiver0 = undefined; - this._settlePromise(promise, handler, receiver, value); -}; + if (executor !== INTERNAL) { + check(this, executor); + this._resolveFromExecutor(executor); + } + this._promiseCreated(); + this._fireEvent("promiseCreated", this); +} -Promise.prototype._clearCallbackDataAtIndex = function(index) { - var base = index * 4 - 4; - this[base + 2] = - this[base + 3] = - this[base + 0] = - this[base + 1] = undefined; +Promise.prototype.toString = function () { + return "[object Promise]"; }; -Promise.prototype._fulfill = function (value) { - var 
bitField = this._bitField; - if (((bitField & 117506048) >>> 16)) return; - if (value === this) { - var err = makeSelfResolutionError(); - this._attachExtraTrace(err); - return this._reject(err); - } - this._setFulfilled(); - this._rejectionHandler0 = value; - - if ((bitField & 65535) > 0) { - if (((bitField & 134217728) !== 0)) { - this._settlePromises(); - } else { - async.settlePromises(this); +Promise.prototype.caught = Promise.prototype["catch"] = function (fn) { + var len = arguments.length; + if (len > 1) { + var catchInstances = new Array(len - 1), + j = 0, i; + for (i = 0; i < len - 1; ++i) { + var item = arguments[i]; + if (util.isObject(item)) { + catchInstances[j++] = item; + } else { + return apiRejection("expecting an object but got " + + "A catch statement predicate " + util.classString(item)); + } } + catchInstances.length = j; + fn = arguments[i]; + return this.then(undefined, catchFilter(catchInstances, fn, this)); } + return this.then(undefined, fn); }; -Promise.prototype._reject = function (reason) { - var bitField = this._bitField; - if (((bitField & 117506048) >>> 16)) return; - this._setRejected(); - this._fulfillmentHandler0 = reason; - - if (this._isFinal()) { - return async.fatalError(reason, util.isNode); - } +Promise.prototype.reflect = function () { + return this._then(reflectHandler, + reflectHandler, undefined, this, undefined); +}; - if ((bitField & 65535) > 0) { - async.settlePromises(this); - } else { - this._ensurePossibleRejectionHandled(); +Promise.prototype.then = function (didFulfill, didReject) { + if (debug.warnings() && arguments.length > 0 && + typeof didFulfill !== "function" && + typeof didReject !== "function") { + var msg = ".then() only accepts functions but was passed: " + + util.classString(didFulfill); + if (arguments.length > 1) { + msg += ", " + util.classString(didReject); + } + this._warn(msg); } + return this._then(didFulfill, didReject, undefined, undefined, undefined); }; -Promise.prototype._fulfillPromises = function (len, value) { - for (var i = 1; i < len; i++) { - var handler = this._fulfillmentHandlerAt(i); - var promise = this._promiseAt(i); - var receiver = this._receiverAt(i); - this._clearCallbackDataAtIndex(i); - this._settlePromise(promise, handler, receiver, value); - } +Promise.prototype.done = function (didFulfill, didReject) { + var promise = + this._then(didFulfill, didReject, undefined, undefined, undefined); + promise._setIsFinal(); }; -Promise.prototype._rejectPromises = function (len, reason) { - for (var i = 1; i < len; i++) { - var handler = this._rejectionHandlerAt(i); - var promise = this._promiseAt(i); - var receiver = this._receiverAt(i); - this._clearCallbackDataAtIndex(i); - this._settlePromise(promise, handler, receiver, reason); +Promise.prototype.spread = function (fn) { + if (typeof fn !== "function") { + return apiRejection("expecting a function but got " + util.classString(fn)); } + return this.all()._then(fn, undefined, undefined, APPLY, undefined); }; -Promise.prototype._settlePromises = function () { - var bitField = this._bitField; - var len = (bitField & 65535); - - if (len > 0) { - if (((bitField & 16842752) !== 0)) { - var reason = this._fulfillmentHandler0; - this._settlePromise0(this._rejectionHandler0, reason, bitField); - this._rejectPromises(len, reason); - } else { - var value = this._rejectionHandler0; - this._settlePromise0(this._fulfillmentHandler0, value, bitField); - this._fulfillPromises(len, value); - } - this._setLength(0); +Promise.prototype.toJSON = function () { + var ret = { 
+ isFulfilled: false, + isRejected: false, + fulfillmentValue: undefined, + rejectionReason: undefined + }; + if (this.isFulfilled()) { + ret.fulfillmentValue = this.value(); + ret.isFulfilled = true; + } else if (this.isRejected()) { + ret.rejectionReason = this.reason(); + ret.isRejected = true; } - this._clearCancellationData(); + return ret; }; -Promise.prototype._settledValue = function() { - var bitField = this._bitField; - if (((bitField & 33554432) !== 0)) { - return this._rejectionHandler0; - } else if (((bitField & 16777216) !== 0)) { - return this._fulfillmentHandler0; +Promise.prototype.all = function () { + if (arguments.length > 0) { + this._warn(".all() was passed arguments but it does not take any"); } + return new PromiseArray(this).promise(); }; -function deferResolve(v) {this.promise._resolveCallback(v);} -function deferReject(v) {this.promise._rejectCallback(v, false);} - -Promise.defer = Promise.pending = function() { - debug.deprecated("Promise.defer", "new Promise"); - var promise = new Promise(INTERNAL); - return { - promise: promise, - resolve: deferResolve, - reject: deferReject - }; +Promise.prototype.error = function (fn) { + return this.caught(util.originatesFromRejection, fn); }; -util.notEnumerableProp(Promise, - "_makeSelfResolutionError", - makeSelfResolutionError); - -__nccwpck_require__(7415)(Promise, INTERNAL, tryConvertToPromise, apiRejection, - debug); -__nccwpck_require__(3767)(Promise, INTERNAL, tryConvertToPromise, debug); -__nccwpck_require__(6616)(Promise, PromiseArray, apiRejection, debug); -__nccwpck_require__(8277)(Promise); -__nccwpck_require__(6653)(Promise); -__nccwpck_require__(5248)( - Promise, PromiseArray, tryConvertToPromise, INTERNAL, async, getDomain); -Promise.Promise = Promise; -Promise.version = "3.4.7"; -__nccwpck_require__(8150)(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug); -__nccwpck_require__(924)(Promise); -__nccwpck_require__(880)(Promise, apiRejection, tryConvertToPromise, createContext, INTERNAL, debug); -__nccwpck_require__(2114)(Promise, INTERNAL, debug); -__nccwpck_require__(8619)(Promise, apiRejection, INTERNAL, tryConvertToPromise, Proxyable, debug); -__nccwpck_require__(5447)(Promise); -__nccwpck_require__(3047)(Promise, INTERNAL); -__nccwpck_require__(5261)(Promise, PromiseArray, tryConvertToPromise, apiRejection); -__nccwpck_require__(256)(Promise, INTERNAL, tryConvertToPromise, apiRejection); -__nccwpck_require__(8959)(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug); -__nccwpck_require__(6087)(Promise, PromiseArray, debug); -__nccwpck_require__(1156)(Promise, PromiseArray, apiRejection); -__nccwpck_require__(2223)(Promise, INTERNAL); -__nccwpck_require__(838)(Promise, INTERNAL); -__nccwpck_require__(5490)(Promise); - - util.toFastProperties(Promise); - util.toFastProperties(Promise.prototype); - function fillTypes(value) { - var p = new Promise(INTERNAL); - p._fulfillmentHandler0 = value; - p._rejectionHandler0 = value; - p._promise0 = value; - p._receiver0 = value; - } - // Complete slack tracking, opt out of field-type tracking and - // stabilize map - fillTypes({a: 1}); - fillTypes({b: 2}); - fillTypes({c: 3}); - fillTypes(1); - fillTypes(function(){}); - fillTypes(undefined); - fillTypes(false); - fillTypes(new Promise(INTERNAL)); - debug.setBounds(Async.firstLineError, util.lastLineError); - return Promise; +Promise.getNewLibraryCopy = module.exports; +Promise.is = function (val) { + return val instanceof Promise; }; - -/***/ }), - -/***/ 5307: -/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { - - -module.exports = function(Promise, INTERNAL, tryConvertToPromise, - apiRejection, Proxyable) { -var util = __nccwpck_require__(7448); -var isArray = util.isArray; - -function toResolutionValue(val) { - switch(val) { - case -2: return []; - case -3: return {}; - } -} - -function PromiseArray(values) { - var promise = this._promise = new Promise(INTERNAL); - if (values instanceof Promise) { - promise._propagateFrom(values, 3); +Promise.fromNode = Promise.fromCallback = function(fn) { + var ret = new Promise(INTERNAL); + ret._captureStackTrace(); + var multiArgs = arguments.length > 1 ? !!Object(arguments[1]).multiArgs + : false; + var result = tryCatch(fn)(nodebackForPromise(ret, multiArgs)); + if (result === errorObj) { + ret._rejectCallback(result.e, true); } - promise._setOnCancel(this); - this._values = values; - this._length = 0; - this._totalResolved = 0; - this._init(undefined, -2); -} -util.inherits(PromiseArray, Proxyable); + if (!ret._isFateSealed()) ret._setAsyncGuaranteed(); + return ret; +}; -PromiseArray.prototype.length = function () { - return this._length; +Promise.all = function (promises) { + return new PromiseArray(promises).promise(); }; -PromiseArray.prototype.promise = function () { - return this._promise; +Promise.cast = function (obj) { + var ret = tryConvertToPromise(obj); + if (!(ret instanceof Promise)) { + ret = new Promise(INTERNAL); + ret._captureStackTrace(); + ret._setFulfilled(); + ret._rejectionHandler0 = obj; + } + return ret; }; -PromiseArray.prototype._init = function init(_, resolveValueIfEmpty) { - var values = tryConvertToPromise(this._values, this._promise); - if (values instanceof Promise) { - values = values._target(); - var bitField = values._bitField; - ; - this._values = values; +Promise.resolve = Promise.fulfilled = Promise.cast; - if (((bitField & 50397184) === 0)) { - this._promise._setAsyncGuaranteed(); - return values._then( - init, - this._reject, - undefined, - this, - resolveValueIfEmpty - ); - } else if (((bitField & 33554432) !== 0)) { - values = values._value(); - } else if (((bitField & 16777216) !== 0)) { - return this._reject(values._reason()); - } else { - return this._cancel(); - } - } - values = util.asArray(values); - if (values === null) { - var err = apiRejection( - "expecting an array or an iterable object but got " + util.classString(values)).reason(); - this._promise._rejectCallback(err, false); - return; - } +Promise.reject = Promise.rejected = function (reason) { + var ret = new Promise(INTERNAL); + ret._captureStackTrace(); + ret._rejectCallback(reason, true); + return ret; +}; - if (values.length === 0) { - if (resolveValueIfEmpty === -5) { - this._resolveEmptyArray(); - } - else { - this._resolve(toResolutionValue(resolveValueIfEmpty)); - } - return; +Promise.setScheduler = function(fn) { + if (typeof fn !== "function") { + throw new TypeError("expecting a function but got " + util.classString(fn)); } - this._iterate(values); + return async.setScheduler(fn); }; -PromiseArray.prototype._iterate = function(values) { - var len = this.getActualLength(values.length); - this._length = len; - this._values = this.shouldCopyValues() ? 
new Array(len) : this._values; - var result = this._promise; - var isResolved = false; - var bitField = null; - for (var i = 0; i < len; ++i) { - var maybePromise = tryConvertToPromise(values[i], result); - - if (maybePromise instanceof Promise) { - maybePromise = maybePromise._target(); - bitField = maybePromise._bitField; - } else { - bitField = null; - } +Promise.prototype._then = function ( + didFulfill, + didReject, + _, receiver, + internalData +) { + var haveInternalData = internalData !== undefined; + var promise = haveInternalData ? internalData : new Promise(INTERNAL); + var target = this._target(); + var bitField = target._bitField; - if (isResolved) { - if (bitField !== null) { - maybePromise.suppressUnhandledRejections(); - } - } else if (bitField !== null) { - if (((bitField & 50397184) === 0)) { - maybePromise._proxy(this, i); - this._values[i] = maybePromise; - } else if (((bitField & 33554432) !== 0)) { - isResolved = this._promiseFulfilled(maybePromise._value(), i); - } else if (((bitField & 16777216) !== 0)) { - isResolved = this._promiseRejected(maybePromise._reason(), i); + if (!haveInternalData) { + promise._propagateFrom(this, 3); + promise._captureStackTrace(); + if (receiver === undefined && + ((this._bitField & 2097152) !== 0)) { + if (!((bitField & 50397184) === 0)) { + receiver = this._boundValue(); } else { - isResolved = this._promiseCancelled(i); + receiver = target === this ? undefined : this._boundTo; } + } + this._fireEvent("promiseChained", this, promise); + } + + var domain = getDomain(); + if (!((bitField & 50397184) === 0)) { + var handler, value, settler = target._settlePromiseCtx; + if (((bitField & 33554432) !== 0)) { + value = target._rejectionHandler0; + handler = didFulfill; + } else if (((bitField & 16777216) !== 0)) { + value = target._fulfillmentHandler0; + handler = didReject; + target._unsetRejectionIsUnhandled(); } else { - isResolved = this._promiseFulfilled(maybePromise, i); + settler = target._settlePromiseLateCancellationObserver; + value = new CancellationError("late cancellation observer"); + target._attachExtraTrace(value); + handler = didReject; } + + async.invoke(settler, target, { + handler: domain === null ? 
handler + : (typeof handler === "function" && + util.domainBind(domain, handler)), + promise: promise, + receiver: receiver, + value: value + }); + } else { + target._addCallbacks(didFulfill, didReject, promise, receiver, domain); } - if (!isResolved) result._setAsyncGuaranteed(); -}; -PromiseArray.prototype._isResolved = function () { - return this._values === null; + return promise; }; -PromiseArray.prototype._resolve = function (value) { - this._values = null; - this._promise._fulfill(value); +Promise.prototype._length = function () { + return this._bitField & 65535; }; -PromiseArray.prototype._cancel = function() { - if (this._isResolved() || !this._promise._isCancellable()) return; - this._values = null; - this._promise._cancel(); +Promise.prototype._isFateSealed = function () { + return (this._bitField & 117506048) !== 0; }; -PromiseArray.prototype._reject = function (reason) { - this._values = null; - this._promise._rejectCallback(reason, false); +Promise.prototype._isFollowing = function () { + return (this._bitField & 67108864) === 67108864; }; -PromiseArray.prototype._promiseFulfilled = function (value, index) { - this._values[index] = value; - var totalResolved = ++this._totalResolved; - if (totalResolved >= this._length) { - this._resolve(this._values); - return true; - } - return false; +Promise.prototype._setLength = function (len) { + this._bitField = (this._bitField & -65536) | + (len & 65535); }; -PromiseArray.prototype._promiseCancelled = function() { - this._cancel(); - return true; +Promise.prototype._setFulfilled = function () { + this._bitField = this._bitField | 33554432; + this._fireEvent("promiseFulfilled", this); }; -PromiseArray.prototype._promiseRejected = function (reason) { - this._totalResolved++; - this._reject(reason); - return true; +Promise.prototype._setRejected = function () { + this._bitField = this._bitField | 16777216; + this._fireEvent("promiseRejected", this); }; -PromiseArray.prototype._resultCancelled = function() { - if (this._isResolved()) return; - var values = this._values; - this._cancel(); - if (values instanceof Promise) { - values.cancel(); - } else { - for (var i = 0; i < values.length; ++i) { - if (values[i] instanceof Promise) { - values[i].cancel(); - } - } - } +Promise.prototype._setFollowing = function () { + this._bitField = this._bitField | 67108864; + this._fireEvent("promiseResolved", this); }; -PromiseArray.prototype.shouldCopyValues = function () { - return true; +Promise.prototype._setIsFinal = function () { + this._bitField = this._bitField | 4194304; }; -PromiseArray.prototype.getActualLength = function (len) { - return len; +Promise.prototype._isFinal = function () { + return (this._bitField & 4194304) > 0; }; -return PromiseArray; +Promise.prototype._unsetCancelled = function() { + this._bitField = this._bitField & (~65536); }; - -/***/ }), - -/***/ 3047: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -module.exports = function(Promise, INTERNAL) { -var THIS = {}; -var util = __nccwpck_require__(7448); -var nodebackForPromise = __nccwpck_require__(4315); -var withAppended = util.withAppended; -var maybeWrapAsError = util.maybeWrapAsError; -var canEvaluate = util.canEvaluate; -var TypeError = (__nccwpck_require__(5816).TypeError); -var defaultSuffix = "Async"; -var defaultPromisified = {__isPromisified__: true}; -var noCopyProps = [ - "arity", "length", - "name", - "arguments", - "caller", - "callee", - "prototype", - "__isPromisified__" -]; -var noCopyPropsPattern = new RegExp("^(?:" + 
noCopyProps.join("|") + ")$"); - -var defaultFilter = function(name) { - return util.isIdentifier(name) && - name.charAt(0) !== "_" && - name !== "constructor"; +Promise.prototype._setCancelled = function() { + this._bitField = this._bitField | 65536; + this._fireEvent("promiseCancelled", this); }; -function propsFilter(key) { - return !noCopyPropsPattern.test(key); -} - -function isPromisified(fn) { - try { - return fn.__isPromisified__ === true; - } - catch (e) { - return false; - } -} +Promise.prototype._setWillBeCancelled = function() { + this._bitField = this._bitField | 8388608; +}; -function hasPromisified(obj, key, suffix) { - var val = util.getDataPropertyOrDefault(obj, key + suffix, - defaultPromisified); - return val ? isPromisified(val) : false; -} -function checkValid(ret, suffix, suffixRegexp) { - for (var i = 0; i < ret.length; i += 2) { - var key = ret[i]; - if (suffixRegexp.test(key)) { - var keyWithoutAsyncSuffix = key.replace(suffixRegexp, ""); - for (var j = 0; j < ret.length; j += 2) { - if (ret[j] === keyWithoutAsyncSuffix) { - throw new TypeError("Cannot promisify an API that has normal methods with '%s'-suffix\u000a\u000a See http://goo.gl/MqrFmX\u000a" - .replace("%s", suffix)); - } - } - } - } -} +Promise.prototype._setAsyncGuaranteed = function() { + if (async.hasCustomScheduler()) return; + this._bitField = this._bitField | 134217728; +}; -function promisifiableMethods(obj, suffix, suffixRegexp, filter) { - var keys = util.inheritedDataKeys(obj); - var ret = []; - for (var i = 0; i < keys.length; ++i) { - var key = keys[i]; - var value = obj[key]; - var passesDefaultFilter = filter === defaultFilter - ? true : defaultFilter(key, value, obj); - if (typeof value === "function" && - !isPromisified(value) && - !hasPromisified(obj, key, suffix) && - filter(key, value, obj, passesDefaultFilter)) { - ret.push(key, value); - } +Promise.prototype._receiverAt = function (index) { + var ret = index === 0 ? 
this._receiver0 : this[ + index * 4 - 4 + 3]; + if (ret === UNDEFINED_BINDING) { + return undefined; + } else if (ret === undefined && this._isBound()) { + return this._boundValue(); } - checkValid(ret, suffix, suffixRegexp); return ret; -} +}; -var escapeIdentRegex = function(str) { - return str.replace(/([$])/, "\\$"); +Promise.prototype._promiseAt = function (index) { + return this[ + index * 4 - 4 + 2]; }; -var makeNodePromisifiedEval; -if (true) { -var switchCaseArgumentOrder = function(likelyArgumentCount) { - var ret = [likelyArgumentCount]; - var min = Math.max(0, likelyArgumentCount - 1 - 3); - for(var i = likelyArgumentCount - 1; i >= min; --i) { - ret.push(i); - } - for(var i = likelyArgumentCount + 1; i <= 3; ++i) { - ret.push(i); - } - return ret; +Promise.prototype._fulfillmentHandlerAt = function (index) { + return this[ + index * 4 - 4 + 0]; }; -var argumentSequence = function(argumentCount) { - return util.filledRange(argumentCount, "_arg", ""); +Promise.prototype._rejectionHandlerAt = function (index) { + return this[ + index * 4 - 4 + 1]; }; -var parameterDeclaration = function(parameterCount) { - return util.filledRange( - Math.max(parameterCount, 3), "_arg", ""); +Promise.prototype._boundValue = function() {}; + +Promise.prototype._migrateCallback0 = function (follower) { + var bitField = follower._bitField; + var fulfill = follower._fulfillmentHandler0; + var reject = follower._rejectionHandler0; + var promise = follower._promise0; + var receiver = follower._receiverAt(0); + if (receiver === undefined) receiver = UNDEFINED_BINDING; + this._addCallbacks(fulfill, reject, promise, receiver, null); }; -var parameterCount = function(fn) { - if (typeof fn.length === "number") { - return Math.max(Math.min(fn.length, 1023 + 1), 0); - } - return 0; +Promise.prototype._migrateCallbackAt = function (follower, index) { + var fulfill = follower._fulfillmentHandlerAt(index); + var reject = follower._rejectionHandlerAt(index); + var promise = follower._promiseAt(index); + var receiver = follower._receiverAt(index); + if (receiver === undefined) receiver = UNDEFINED_BINDING; + this._addCallbacks(fulfill, reject, promise, receiver, null); }; -makeNodePromisifiedEval = -function(callback, receiver, originalName, fn, _, multiArgs) { - var newParameterCount = Math.max(0, parameterCount(fn) - 1); - var argumentOrder = switchCaseArgumentOrder(newParameterCount); - var shouldProxyThis = typeof callback === "string" || receiver === THIS; +Promise.prototype._addCallbacks = function ( + fulfill, + reject, + promise, + receiver, + domain +) { + var index = this._length(); - function generateCallForArgumentCount(count) { - var args = argumentSequence(count).join(", "); - var comma = count > 0 ? ", " : ""; - var ret; - if (shouldProxyThis) { - ret = "ret = callback.call(this, {{args}}, nodeback); break;\n"; - } else { - ret = receiver === undefined - ? "ret = callback({{args}}, nodeback); break;\n" - : "ret = callback.call(receiver, {{args}}, nodeback); break;\n"; - } - return ret.replace("{{args}}", args).replace(", ", comma); + if (index >= 65535 - 4) { + index = 0; + this._setLength(0); } - function generateArgumentSwitchCase() { - var ret = ""; - for (var i = 0; i < argumentOrder.length; ++i) { - ret += "case " + argumentOrder[i] +":" + - generateCallForArgumentCount(argumentOrder[i]); + if (index === 0) { + this._promise0 = promise; + this._receiver0 = receiver; + if (typeof fulfill === "function") { + this._fulfillmentHandler0 = + domain === null ? 
fulfill : util.domainBind(domain, fulfill); + } + if (typeof reject === "function") { + this._rejectionHandler0 = + domain === null ? reject : util.domainBind(domain, reject); + } + } else { + var base = index * 4 - 4; + this[base + 2] = promise; + this[base + 3] = receiver; + if (typeof fulfill === "function") { + this[base + 0] = + domain === null ? fulfill : util.domainBind(domain, fulfill); + } + if (typeof reject === "function") { + this[base + 1] = + domain === null ? reject : util.domainBind(domain, reject); } - - ret += " \n\ - default: \n\ - var args = new Array(len + 1); \n\ - var i = 0; \n\ - for (var i = 0; i < len; ++i) { \n\ - args[i] = arguments[i]; \n\ - } \n\ - args[i] = nodeback; \n\ - [CodeForCall] \n\ - break; \n\ - ".replace("[CodeForCall]", (shouldProxyThis - ? "ret = callback.apply(this, args);\n" - : "ret = callback.apply(receiver, args);\n")); - return ret; } + this._setLength(index + 1); + return index; +}; - var getFunctionCode = typeof callback === "string" - ? ("this != null ? this['"+callback+"'] : fn") - : "fn"; - var body = "'use strict'; \n\ - var ret = function (Parameters) { \n\ - 'use strict'; \n\ - var len = arguments.length; \n\ - var promise = new Promise(INTERNAL); \n\ - promise._captureStackTrace(); \n\ - var nodeback = nodebackForPromise(promise, " + multiArgs + "); \n\ - var ret; \n\ - var callback = tryCatch([GetFunctionCode]); \n\ - switch(len) { \n\ - [CodeForSwitchCase] \n\ - } \n\ - if (ret === errorObj) { \n\ - promise._rejectCallback(maybeWrapAsError(ret.e), true, true);\n\ - } \n\ - if (!promise._isFateSealed()) promise._setAsyncGuaranteed(); \n\ - return promise; \n\ - }; \n\ - notEnumerableProp(ret, '__isPromisified__', true); \n\ - return ret; \n\ - ".replace("[CodeForSwitchCase]", generateArgumentSwitchCase()) - .replace("[GetFunctionCode]", getFunctionCode); - body = body.replace("Parameters", parameterDeclaration(newParameterCount)); - return new Function("Promise", - "fn", - "receiver", - "withAppended", - "maybeWrapAsError", - "nodebackForPromise", - "tryCatch", - "errorObj", - "notEnumerableProp", - "INTERNAL", - body)( - Promise, - fn, - receiver, - withAppended, - maybeWrapAsError, - nodebackForPromise, - util.tryCatch, - util.errorObj, - util.notEnumerableProp, - INTERNAL); +Promise.prototype._proxy = function (proxyable, arg) { + this._addCallbacks(undefined, undefined, arg, proxyable, null); }; -} -function makeNodePromisifiedClosure(callback, receiver, _, fn, __, multiArgs) { - var defaultThis = (function() {return this;})(); - var method = callback; - if (typeof method === "string") { - callback = fn; +Promise.prototype._resolveCallback = function(value, shouldBind) { + if (((this._bitField & 117506048) !== 0)) return; + if (value === this) + return this._rejectCallback(makeSelfResolutionError(), false); + var maybePromise = tryConvertToPromise(value, this); + if (!(maybePromise instanceof Promise)) return this._fulfill(value); + + if (shouldBind) this._propagateFrom(maybePromise, 2); + + var promise = maybePromise._target(); + + if (promise === this) { + this._reject(makeSelfResolutionError()); + return; } - function promisified() { - var _receiver = receiver; - if (receiver === THIS) _receiver = this; - var promise = new Promise(INTERNAL); - promise._captureStackTrace(); - var cb = typeof method === "string" && this !== defaultThis - ? 
this[method] : callback; - var fn = nodebackForPromise(promise, multiArgs); - try { - cb.apply(_receiver, withAppended(arguments, fn)); - } catch(e) { - promise._rejectCallback(maybeWrapAsError(e), true, true); + + var bitField = promise._bitField; + if (((bitField & 50397184) === 0)) { + var len = this._length(); + if (len > 0) promise._migrateCallback0(this); + for (var i = 1; i < len; ++i) { + promise._migrateCallbackAt(this, i); } - if (!promise._isFateSealed()) promise._setAsyncGuaranteed(); - return promise; + this._setFollowing(); + this._setLength(0); + this._setFollowee(promise); + } else if (((bitField & 33554432) !== 0)) { + this._fulfill(promise._value()); + } else if (((bitField & 16777216) !== 0)) { + this._reject(promise._reason()); + } else { + var reason = new CancellationError("late cancellation observer"); + promise._attachExtraTrace(reason); + this._reject(reason); } - util.notEnumerableProp(promisified, "__isPromisified__", true); - return promisified; -} +}; -var makeNodePromisified = canEvaluate - ? makeNodePromisifiedEval - : makeNodePromisifiedClosure; +Promise.prototype._rejectCallback = +function(reason, synchronous, ignoreNonErrorWarnings) { + var trace = util.ensureErrorObject(reason); + var hasStack = trace === reason; + if (!hasStack && !ignoreNonErrorWarnings && debug.warnings()) { + var message = "a promise was rejected with a non-error: " + + util.classString(reason); + this._warn(message, true); + } + this._attachExtraTrace(trace, synchronous ? hasStack : false); + this._reject(reason); +}; -function promisifyAll(obj, suffix, filter, promisifier, multiArgs) { - var suffixRegexp = new RegExp(escapeIdentRegex(suffix) + "$"); - var methods = - promisifiableMethods(obj, suffix, suffixRegexp, filter); +Promise.prototype._resolveFromExecutor = function (executor) { + var promise = this; + this._captureStackTrace(); + this._pushContext(); + var synchronous = true; + var r = this._execute(executor, function(value) { + promise._resolveCallback(value); + }, function (reason) { + promise._rejectCallback(reason, synchronous); + }); + synchronous = false; + this._popContext(); - for (var i = 0, len = methods.length; i < len; i+= 2) { - var key = methods[i]; - var fn = methods[i+1]; - var promisifiedKey = key + suffix; - if (promisifier === makeNodePromisified) { - obj[promisifiedKey] = - makeNodePromisified(key, THIS, key, fn, suffix, multiArgs); - } else { - var promisified = promisifier(fn, function() { - return makeNodePromisified(key, THIS, key, - fn, suffix, multiArgs); - }); - util.notEnumerableProp(promisified, "__isPromisified__", true); - obj[promisifiedKey] = promisified; - } + if (r !== undefined) { + promise._rejectCallback(r, true); } - util.toFastProperties(obj); - return obj; -} - -function promisify(callback, receiver, multiArgs) { - return makeNodePromisified(callback, receiver, undefined, - callback, null, multiArgs); -} +}; -Promise.promisify = function (fn, options) { - if (typeof fn !== "function") { - throw new TypeError("expecting a function but got " + util.classString(fn)); +Promise.prototype._settlePromiseFromHandler = function ( + handler, receiver, value, promise +) { + var bitField = promise._bitField; + if (((bitField & 65536) !== 0)) return; + promise._pushContext(); + var x; + if (receiver === APPLY) { + if (!value || typeof value.length !== "number") { + x = errorObj; + x.e = new TypeError("cannot .spread() a non-array: " + + util.classString(value)); + } else { + x = tryCatch(handler).apply(this._boundValue(), value); + } + } else { + 
x = tryCatch(handler).call(receiver, value); } - if (isPromisified(fn)) { - return fn; + var promiseCreated = promise._popContext(); + bitField = promise._bitField; + if (((bitField & 65536) !== 0)) return; + + if (x === NEXT_FILTER) { + promise._reject(value); + } else if (x === errorObj) { + promise._rejectCallback(x.e, false); + } else { + debug.checkForgottenReturns(x, promiseCreated, "", promise, this); + promise._resolveCallback(x); } - options = Object(options); - var receiver = options.context === undefined ? THIS : options.context; - var multiArgs = !!options.multiArgs; - var ret = promisify(fn, receiver, multiArgs); - util.copyDescriptors(fn, ret, propsFilter); +}; + +Promise.prototype._target = function() { + var ret = this; + while (ret._isFollowing()) ret = ret._followee(); return ret; }; -Promise.promisifyAll = function (target, options) { - if (typeof target !== "function" && typeof target !== "object") { - throw new TypeError("the target of promisifyAll must be an object or a function\u000a\u000a See http://goo.gl/MqrFmX\u000a"); - } - options = Object(options); - var multiArgs = !!options.multiArgs; - var suffix = options.suffix; - if (typeof suffix !== "string") suffix = defaultSuffix; - var filter = options.filter; - if (typeof filter !== "function") filter = defaultFilter; - var promisifier = options.promisifier; - if (typeof promisifier !== "function") promisifier = makeNodePromisified; +Promise.prototype._followee = function() { + return this._rejectionHandler0; +}; - if (!util.isIdentifier(suffix)) { - throw new RangeError("suffix must be a valid identifier\u000a\u000a See http://goo.gl/MqrFmX\u000a"); - } +Promise.prototype._setFollowee = function(promise) { + this._rejectionHandler0 = promise; +}; - var keys = util.inheritedDataKeys(target); - for (var i = 0; i < keys.length; ++i) { - var value = target[keys[i]]; - if (keys[i] !== "constructor" && - util.isClass(value)) { - promisifyAll(value.prototype, suffix, filter, promisifier, - multiArgs); - promisifyAll(value, suffix, filter, promisifier, multiArgs); +Promise.prototype._settlePromise = function(promise, handler, receiver, value) { + var isPromise = promise instanceof Promise; + var bitField = this._bitField; + var asyncGuaranteed = ((bitField & 134217728) !== 0); + if (((bitField & 65536) !== 0)) { + if (isPromise) promise._invokeInternalOnCancel(); + + if (receiver instanceof PassThroughHandlerContext && + receiver.isFinallyHandler()) { + receiver.cancelPromise = promise; + if (tryCatch(handler).call(receiver, value) === errorObj) { + promise._reject(errorObj.e); + } + } else if (handler === reflectHandler) { + promise._fulfill(reflectHandler.call(receiver)); + } else if (receiver instanceof Proxyable) { + receiver._promiseCancelled(promise); + } else if (isPromise || promise instanceof PromiseArray) { + promise._cancel(); + } else { + receiver.cancel(); + } + } else if (typeof handler === "function") { + if (!isPromise) { + handler.call(receiver, value, promise); + } else { + if (asyncGuaranteed) promise._setAsyncGuaranteed(); + this._settlePromiseFromHandler(handler, receiver, value, promise); + } + } else if (receiver instanceof Proxyable) { + if (!receiver._isResolved()) { + if (((bitField & 33554432) !== 0)) { + receiver._promiseFulfilled(value, promise); + } else { + receiver._promiseRejected(value, promise); + } + } + } else if (isPromise) { + if (asyncGuaranteed) promise._setAsyncGuaranteed(); + if (((bitField & 33554432) !== 0)) { + promise._fulfill(value); + } else { + promise._reject(value); } } 
- - return promisifyAll(target, suffix, filter, promisifier, multiArgs); -}; }; +Promise.prototype._settlePromiseLateCancellationObserver = function(ctx) { + var handler = ctx.handler; + var promise = ctx.promise; + var receiver = ctx.receiver; + var value = ctx.value; + if (typeof handler === "function") { + if (!(promise instanceof Promise)) { + handler.call(receiver, value, promise); + } else { + this._settlePromiseFromHandler(handler, receiver, value, promise); + } + } else if (promise instanceof Promise) { + promise._reject(value); + } +}; +Promise.prototype._settlePromiseCtx = function(ctx) { + this._settlePromise(ctx.promise, ctx.handler, ctx.receiver, ctx.value); +}; -/***/ }), +Promise.prototype._settlePromise0 = function(handler, value, bitField) { + var promise = this._promise0; + var receiver = this._receiverAt(0); + this._promise0 = undefined; + this._receiver0 = undefined; + this._settlePromise(promise, handler, receiver, value); +}; -/***/ 5261: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Promise.prototype._clearCallbackDataAtIndex = function(index) { + var base = index * 4 - 4; + this[base + 2] = + this[base + 3] = + this[base + 0] = + this[base + 1] = undefined; +}; +Promise.prototype._fulfill = function (value) { + var bitField = this._bitField; + if (((bitField & 117506048) >>> 16)) return; + if (value === this) { + var err = makeSelfResolutionError(); + this._attachExtraTrace(err); + return this._reject(err); + } + this._setFulfilled(); + this._rejectionHandler0 = value; -module.exports = function( - Promise, PromiseArray, tryConvertToPromise, apiRejection) { -var util = __nccwpck_require__(7448); -var isObject = util.isObject; -var es5 = __nccwpck_require__(3062); -var Es6Map; -if (typeof Map === "function") Es6Map = Map; + if ((bitField & 65535) > 0) { + if (((bitField & 134217728) !== 0)) { + this._settlePromises(); + } else { + async.settlePromises(this); + } + } +}; -var mapToEntries = (function() { - var index = 0; - var size = 0; +Promise.prototype._reject = function (reason) { + var bitField = this._bitField; + if (((bitField & 117506048) >>> 16)) return; + this._setRejected(); + this._fulfillmentHandler0 = reason; - function extractEntry(value, key) { - this[index] = value; - this[index + size] = key; - index++; + if (this._isFinal()) { + return async.fatalError(reason, util.isNode); } - return function mapToEntries(map) { - size = map.size; - index = 0; - var ret = new Array(map.size * 2); - map.forEach(extractEntry, ret); - return ret; - }; -})(); + if ((bitField & 65535) > 0) { + async.settlePromises(this); + } else { + this._ensurePossibleRejectionHandled(); + } +}; -var entriesToMap = function(entries) { - var ret = new Es6Map(); - var length = entries.length / 2 | 0; - for (var i = 0; i < length; ++i) { - var key = entries[length + i]; - var value = entries[i]; - ret.set(key, value); +Promise.prototype._fulfillPromises = function (len, value) { + for (var i = 1; i < len; i++) { + var handler = this._fulfillmentHandlerAt(i); + var promise = this._promiseAt(i); + var receiver = this._receiverAt(i); + this._clearCallbackDataAtIndex(i); + this._settlePromise(promise, handler, receiver, value); } - return ret; }; -function PropertiesPromiseArray(obj) { - var isMap = false; - var entries; - if (Es6Map !== undefined && obj instanceof Es6Map) { - entries = mapToEntries(obj); - isMap = true; - } else { - var keys = es5.keys(obj); - var len = keys.length; - entries = new Array(len * 2); - for (var i = 0; i < len; ++i) { - var key = 
keys[i]; - entries[i] = obj[key]; - entries[i + len] = key; - } +Promise.prototype._rejectPromises = function (len, reason) { + for (var i = 1; i < len; i++) { + var handler = this._rejectionHandlerAt(i); + var promise = this._promiseAt(i); + var receiver = this._receiverAt(i); + this._clearCallbackDataAtIndex(i); + this._settlePromise(promise, handler, receiver, reason); } - this.constructor$(entries); - this._isMap = isMap; - this._init$(undefined, -3); -} -util.inherits(PropertiesPromiseArray, PromiseArray); +}; -PropertiesPromiseArray.prototype._init = function () {}; +Promise.prototype._settlePromises = function () { + var bitField = this._bitField; + var len = (bitField & 65535); -PropertiesPromiseArray.prototype._promiseFulfilled = function (value, index) { - this._values[index] = value; - var totalResolved = ++this._totalResolved; - if (totalResolved >= this._length) { - var val; - if (this._isMap) { - val = entriesToMap(this._values); + if (len > 0) { + if (((bitField & 16842752) !== 0)) { + var reason = this._fulfillmentHandler0; + this._settlePromise0(this._rejectionHandler0, reason, bitField); + this._rejectPromises(len, reason); } else { - val = {}; - var keyOffset = this.length(); - for (var i = 0, len = this.length(); i < len; ++i) { - val[this._values[i + keyOffset]] = this._values[i]; - } + var value = this._rejectionHandler0; + this._settlePromise0(this._fulfillmentHandler0, value, bitField); + this._fulfillPromises(len, value); } - this._resolve(val); - return true; + this._setLength(0); } - return false; -}; - -PropertiesPromiseArray.prototype.shouldCopyValues = function () { - return false; + this._clearCancellationData(); }; -PropertiesPromiseArray.prototype.getActualLength = function (len) { - return len >> 1; +Promise.prototype._settledValue = function() { + var bitField = this._bitField; + if (((bitField & 33554432) !== 0)) { + return this._rejectionHandler0; + } else if (((bitField & 16777216) !== 0)) { + return this._fulfillmentHandler0; + } }; -function props(promises) { - var ret; - var castValue = tryConvertToPromise(promises); +function deferResolve(v) {this.promise._resolveCallback(v);} +function deferReject(v) {this.promise._rejectCallback(v, false);} - if (!isObject(castValue)) { - return apiRejection("cannot await properties of a non-object\u000a\u000a See http://goo.gl/MqrFmX\u000a"); - } else if (castValue instanceof Promise) { - ret = castValue._then( - Promise.props, undefined, undefined, undefined, undefined); - } else { - ret = new PropertiesPromiseArray(castValue).promise(); - } +Promise.defer = Promise.pending = function() { + debug.deprecated("Promise.defer", "new Promise"); + var promise = new Promise(INTERNAL); + return { + promise: promise, + resolve: deferResolve, + reject: deferReject + }; +}; - if (castValue instanceof Promise) { - ret._propagateFrom(castValue, 2); - } - return ret; -} +util.notEnumerableProp(Promise, + "_makeSelfResolutionError", + makeSelfResolutionError); -Promise.prototype.props = function () { - return props(this); -}; +__nccwpck_require__(1610)(Promise, INTERNAL, tryConvertToPromise, apiRejection, + debug); +__nccwpck_require__(276)(Promise, INTERNAL, tryConvertToPromise, debug); +__nccwpck_require__(8087)(Promise, PromiseArray, apiRejection, debug); +__nccwpck_require__(3781)(Promise); +__nccwpck_require__(4639)(Promise); +__nccwpck_require__(6188)( + Promise, PromiseArray, tryConvertToPromise, INTERNAL, async, getDomain); +Promise.Promise = Promise; +Promise.version = "3.4.7"; +__nccwpck_require__(7908)(Promise, 
PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug); +__nccwpck_require__(367)(Promise); +__nccwpck_require__(8433)(Promise, apiRejection, tryConvertToPromise, createContext, INTERNAL, debug); +__nccwpck_require__(3651)(Promise, INTERNAL, debug); +__nccwpck_require__(60)(Promise, apiRejection, INTERNAL, tryConvertToPromise, Proxyable, debug); +__nccwpck_require__(8936)(Promise); +__nccwpck_require__(7507)(Promise, INTERNAL); +__nccwpck_require__(8272)(Promise, PromiseArray, tryConvertToPromise, apiRejection); +__nccwpck_require__(9133)(Promise, INTERNAL, tryConvertToPromise, apiRejection); +__nccwpck_require__(5372)(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug); +__nccwpck_require__(1012)(Promise, PromiseArray, debug); +__nccwpck_require__(3337)(Promise, PromiseArray, apiRejection); +__nccwpck_require__(6630)(Promise, INTERNAL); +__nccwpck_require__(8706)(Promise, INTERNAL); +__nccwpck_require__(6201)(Promise); + + util.toFastProperties(Promise); + util.toFastProperties(Promise.prototype); + function fillTypes(value) { + var p = new Promise(INTERNAL); + p._fulfillmentHandler0 = value; + p._rejectionHandler0 = value; + p._promise0 = value; + p._receiver0 = value; + } + // Complete slack tracking, opt out of field-type tracking and + // stabilize map + fillTypes({a: 1}); + fillTypes({b: 2}); + fillTypes({c: 3}); + fillTypes(1); + fillTypes(function(){}); + fillTypes(undefined); + fillTypes(false); + fillTypes(new Promise(INTERNAL)); + debug.setBounds(Async.firstLineError, util.lastLineError); + return Promise; -Promise.props = function (promises) { - return props(promises); -}; }; /***/ }), -/***/ 878: -/***/ ((module) => { +/***/ 7151: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function arrayMove(src, srcIndex, dst, dstIndex, len) { - for (var j = 0; j < len; ++j) { - dst[j + dstIndex] = src[j + srcIndex]; - src[j + srcIndex] = void 0; +module.exports = function(Promise, INTERNAL, tryConvertToPromise, + apiRejection, Proxyable) { +var util = __nccwpck_require__(2587); +var isArray = util.isArray; + +function toResolutionValue(val) { + switch(val) { + case -2: return []; + case -3: return {}; } } -function Queue(capacity) { - this._capacity = capacity; +function PromiseArray(values) { + var promise = this._promise = new Promise(INTERNAL); + if (values instanceof Promise) { + promise._propagateFrom(values, 3); + } + promise._setOnCancel(this); + this._values = values; this._length = 0; - this._front = 0; + this._totalResolved = 0; + this._init(undefined, -2); } +util.inherits(PromiseArray, Proxyable); -Queue.prototype._willBeOverCapacity = function (size) { - return this._capacity < size; +PromiseArray.prototype.length = function () { + return this._length; }; -Queue.prototype._pushOne = function (arg) { - var length = this.length(); - this._checkCapacity(length + 1); - var i = (this._front + length) & (this._capacity - 1); - this[i] = arg; - this._length = length + 1; +PromiseArray.prototype.promise = function () { + return this._promise; }; -Queue.prototype.push = function (fn, receiver, arg) { - var length = this.length() + 3; - if (this._willBeOverCapacity(length)) { - this._pushOne(fn); - this._pushOne(receiver); - this._pushOne(arg); +PromiseArray.prototype._init = function init(_, resolveValueIfEmpty) { + var values = tryConvertToPromise(this._values, this._promise); + if (values instanceof Promise) { + values = values._target(); + var bitField = values._bitField; + ; + this._values = values; + + if (((bitField & 
50397184) === 0)) { + this._promise._setAsyncGuaranteed(); + return values._then( + init, + this._reject, + undefined, + this, + resolveValueIfEmpty + ); + } else if (((bitField & 33554432) !== 0)) { + values = values._value(); + } else if (((bitField & 16777216) !== 0)) { + return this._reject(values._reason()); + } else { + return this._cancel(); + } + } + values = util.asArray(values); + if (values === null) { + var err = apiRejection( + "expecting an array or an iterable object but got " + util.classString(values)).reason(); + this._promise._rejectCallback(err, false); return; } - var j = this._front + length - 3; - this._checkCapacity(length); - var wrapMask = this._capacity - 1; - this[(j + 0) & wrapMask] = fn; - this[(j + 1) & wrapMask] = receiver; - this[(j + 2) & wrapMask] = arg; - this._length = length; -}; - -Queue.prototype.shift = function () { - var front = this._front, - ret = this[front]; - this[front] = undefined; - this._front = (front + 1) & (this._capacity - 1); - this._length--; - return ret; + if (values.length === 0) { + if (resolveValueIfEmpty === -5) { + this._resolveEmptyArray(); + } + else { + this._resolve(toResolutionValue(resolveValueIfEmpty)); + } + return; + } + this._iterate(values); }; -Queue.prototype.length = function () { - return this._length; -}; +PromiseArray.prototype._iterate = function(values) { + var len = this.getActualLength(values.length); + this._length = len; + this._values = this.shouldCopyValues() ? new Array(len) : this._values; + var result = this._promise; + var isResolved = false; + var bitField = null; + for (var i = 0; i < len; ++i) { + var maybePromise = tryConvertToPromise(values[i], result); -Queue.prototype._checkCapacity = function (size) { - if (this._capacity < size) { - this._resizeTo(this._capacity << 1); + if (maybePromise instanceof Promise) { + maybePromise = maybePromise._target(); + bitField = maybePromise._bitField; + } else { + bitField = null; + } + + if (isResolved) { + if (bitField !== null) { + maybePromise.suppressUnhandledRejections(); + } + } else if (bitField !== null) { + if (((bitField & 50397184) === 0)) { + maybePromise._proxy(this, i); + this._values[i] = maybePromise; + } else if (((bitField & 33554432) !== 0)) { + isResolved = this._promiseFulfilled(maybePromise._value(), i); + } else if (((bitField & 16777216) !== 0)) { + isResolved = this._promiseRejected(maybePromise._reason(), i); + } else { + isResolved = this._promiseCancelled(i); + } + } else { + isResolved = this._promiseFulfilled(maybePromise, i); + } } + if (!isResolved) result._setAsyncGuaranteed(); }; -Queue.prototype._resizeTo = function (capacity) { - var oldCapacity = this._capacity; - this._capacity = capacity; - var front = this._front; - var length = this._length; - var moveItemsCount = (front + length) & (oldCapacity - 1); - arrayMove(this, 0, this, oldCapacity, moveItemsCount); +PromiseArray.prototype._isResolved = function () { + return this._values === null; }; -module.exports = Queue; - - -/***/ }), - -/***/ 256: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -module.exports = function( - Promise, INTERNAL, tryConvertToPromise, apiRejection) { -var util = __nccwpck_require__(7448); +PromiseArray.prototype._resolve = function (value) { + this._values = null; + this._promise._fulfill(value); +}; -var raceLater = function (promise) { - return promise.then(function(array) { - return race(array, promise); - }); +PromiseArray.prototype._cancel = function() { + if (this._isResolved() || 
!this._promise._isCancellable()) return; + this._values = null; + this._promise._cancel(); }; -function race(promises, parent) { - var maybePromise = tryConvertToPromise(promises); +PromiseArray.prototype._reject = function (reason) { + this._values = null; + this._promise._rejectCallback(reason, false); +}; - if (maybePromise instanceof Promise) { - return raceLater(maybePromise); - } else { - promises = util.asArray(promises); - if (promises === null) - return apiRejection("expecting an array or an iterable object but got " + util.classString(promises)); +PromiseArray.prototype._promiseFulfilled = function (value, index) { + this._values[index] = value; + var totalResolved = ++this._totalResolved; + if (totalResolved >= this._length) { + this._resolve(this._values); + return true; } + return false; +}; - var ret = new Promise(INTERNAL); - if (parent !== undefined) { - ret._propagateFrom(parent, 3); - } - var fulfill = ret._fulfill; - var reject = ret._reject; - for (var i = 0, len = promises.length; i < len; ++i) { - var val = promises[i]; +PromiseArray.prototype._promiseCancelled = function() { + this._cancel(); + return true; +}; - if (val === undefined && !(i in promises)) { - continue; - } +PromiseArray.prototype._promiseRejected = function (reason) { + this._totalResolved++; + this._reject(reason); + return true; +}; - Promise.cast(val)._then(fulfill, reject, undefined, ret, null); +PromiseArray.prototype._resultCancelled = function() { + if (this._isResolved()) return; + var values = this._values; + this._cancel(); + if (values instanceof Promise) { + values.cancel(); + } else { + for (var i = 0; i < values.length; ++i) { + if (values[i] instanceof Promise) { + values[i].cancel(); + } + } } - return ret; -} +}; -Promise.race = function (promises) { - return race(promises, undefined); +PromiseArray.prototype.shouldCopyValues = function () { + return true; }; -Promise.prototype.race = function () { - return race(this, undefined); +PromiseArray.prototype.getActualLength = function (len) { + return len; }; +return PromiseArray; }; /***/ }), -/***/ 8959: +/***/ 7507: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = function(Promise, - PromiseArray, - apiRejection, - tryConvertToPromise, - INTERNAL, - debug) { -var getDomain = Promise._getDomain; -var util = __nccwpck_require__(7448); -var tryCatch = util.tryCatch; +module.exports = function(Promise, INTERNAL) { +var THIS = {}; +var util = __nccwpck_require__(2587); +var nodebackForPromise = __nccwpck_require__(4696); +var withAppended = util.withAppended; +var maybeWrapAsError = util.maybeWrapAsError; +var canEvaluate = util.canEvaluate; +var TypeError = (__nccwpck_require__(5514).TypeError); +var defaultSuffix = "Async"; +var defaultPromisified = {__isPromisified__: true}; +var noCopyProps = [ + "arity", "length", + "name", + "arguments", + "caller", + "callee", + "prototype", + "__isPromisified__" +]; +var noCopyPropsPattern = new RegExp("^(?:" + noCopyProps.join("|") + ")$"); -function ReductionPromiseArray(promises, fn, initialValue, _each) { - this.constructor$(promises); - var domain = getDomain(); - this._fn = domain === null ? 
fn : util.domainBind(domain, fn); - if (initialValue !== undefined) { - initialValue = Promise.resolve(initialValue); - initialValue._attachCancellationCallback(this); +var defaultFilter = function(name) { + return util.isIdentifier(name) && + name.charAt(0) !== "_" && + name !== "constructor"; +}; + +function propsFilter(key) { + return !noCopyPropsPattern.test(key); +} + +function isPromisified(fn) { + try { + return fn.__isPromisified__ === true; } - this._initialValue = initialValue; - this._currentCancellable = null; - if(_each === INTERNAL) { - this._eachValues = Array(this._length); - } else if (_each === 0) { - this._eachValues = null; - } else { - this._eachValues = undefined; + catch (e) { + return false; } - this._promise._captureStackTrace(); - this._init$(undefined, -5); } -util.inherits(ReductionPromiseArray, PromiseArray); -ReductionPromiseArray.prototype._gotAccum = function(accum) { - if (this._eachValues !== undefined && - this._eachValues !== null && - accum !== INTERNAL) { - this._eachValues.push(accum); +function hasPromisified(obj, key, suffix) { + var val = util.getDataPropertyOrDefault(obj, key + suffix, + defaultPromisified); + return val ? isPromisified(val) : false; +} +function checkValid(ret, suffix, suffixRegexp) { + for (var i = 0; i < ret.length; i += 2) { + var key = ret[i]; + if (suffixRegexp.test(key)) { + var keyWithoutAsyncSuffix = key.replace(suffixRegexp, ""); + for (var j = 0; j < ret.length; j += 2) { + if (ret[j] === keyWithoutAsyncSuffix) { + throw new TypeError("Cannot promisify an API that has normal methods with '%s'-suffix\u000a\u000a See http://goo.gl/MqrFmX\u000a" + .replace("%s", suffix)); + } + } + } } -}; +} -ReductionPromiseArray.prototype._eachComplete = function(value) { - if (this._eachValues !== null) { - this._eachValues.push(value); +function promisifiableMethods(obj, suffix, suffixRegexp, filter) { + var keys = util.inheritedDataKeys(obj); + var ret = []; + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var value = obj[key]; + var passesDefaultFilter = filter === defaultFilter + ? true : defaultFilter(key, value, obj); + if (typeof value === "function" && + !isPromisified(value) && + !hasPromisified(obj, key, suffix) && + filter(key, value, obj, passesDefaultFilter)) { + ret.push(key, value); + } } - return this._eachValues; -}; - -ReductionPromiseArray.prototype._init = function() {}; + checkValid(ret, suffix, suffixRegexp); + return ret; +} -ReductionPromiseArray.prototype._resolveEmptyArray = function() { - this._resolve(this._eachValues !== undefined ? 
this._eachValues - : this._initialValue); +var escapeIdentRegex = function(str) { + return str.replace(/([$])/, "\\$"); }; -ReductionPromiseArray.prototype.shouldCopyValues = function () { - return false; +var makeNodePromisifiedEval; +if (true) { +var switchCaseArgumentOrder = function(likelyArgumentCount) { + var ret = [likelyArgumentCount]; + var min = Math.max(0, likelyArgumentCount - 1 - 3); + for(var i = likelyArgumentCount - 1; i >= min; --i) { + ret.push(i); + } + for(var i = likelyArgumentCount + 1; i <= 3; ++i) { + ret.push(i); + } + return ret; }; -ReductionPromiseArray.prototype._resolve = function(value) { - this._promise._resolveCallback(value); - this._values = null; +var argumentSequence = function(argumentCount) { + return util.filledRange(argumentCount, "_arg", ""); }; -ReductionPromiseArray.prototype._resultCancelled = function(sender) { - if (sender === this._initialValue) return this._cancel(); - if (this._isResolved()) return; - this._resultCancelled$(); - if (this._currentCancellable instanceof Promise) { - this._currentCancellable.cancel(); - } - if (this._initialValue instanceof Promise) { - this._initialValue.cancel(); - } +var parameterDeclaration = function(parameterCount) { + return util.filledRange( + Math.max(parameterCount, 3), "_arg", ""); }; -ReductionPromiseArray.prototype._iterate = function (values) { - this._values = values; - var value; - var i; - var length = values.length; - if (this._initialValue !== undefined) { - value = this._initialValue; - i = 0; - } else { - value = Promise.resolve(values[0]); - i = 1; +var parameterCount = function(fn) { + if (typeof fn.length === "number") { + return Math.max(Math.min(fn.length, 1023 + 1), 0); } + return 0; +}; - this._currentCancellable = value; +makeNodePromisifiedEval = +function(callback, receiver, originalName, fn, _, multiArgs) { + var newParameterCount = Math.max(0, parameterCount(fn) - 1); + var argumentOrder = switchCaseArgumentOrder(newParameterCount); + var shouldProxyThis = typeof callback === "string" || receiver === THIS; - if (!value.isRejected()) { - for (; i < length; ++i) { - var ctx = { - accum: null, - value: values[i], - index: i, - length: length, - array: this - }; - value = value._then(gotAccum, undefined, undefined, ctx, undefined); + function generateCallForArgumentCount(count) { + var args = argumentSequence(count).join(", "); + var comma = count > 0 ? ", " : ""; + var ret; + if (shouldProxyThis) { + ret = "ret = callback.call(this, {{args}}, nodeback); break;\n"; + } else { + ret = receiver === undefined + ? 
"ret = callback({{args}}, nodeback); break;\n" + : "ret = callback.call(receiver, {{args}}, nodeback); break;\n"; } + return ret.replace("{{args}}", args).replace(", ", comma); } - if (this._eachValues !== undefined) { - value = value - ._then(this._eachComplete, undefined, undefined, this, undefined); - } - value._then(completed, completed, undefined, value, this); -}; - -Promise.prototype.reduce = function (fn, initialValue) { - return reduce(this, fn, initialValue, null); -}; - -Promise.reduce = function (promises, fn, initialValue, _each) { - return reduce(promises, fn, initialValue, _each); -}; - -function completed(valueOrReason, array) { - if (this.isFulfilled()) { - array._resolve(valueOrReason); - } else { - array._reject(valueOrReason); - } -} + function generateArgumentSwitchCase() { + var ret = ""; + for (var i = 0; i < argumentOrder.length; ++i) { + ret += "case " + argumentOrder[i] +":" + + generateCallForArgumentCount(argumentOrder[i]); + } -function reduce(promises, fn, initialValue, _each) { - if (typeof fn !== "function") { - return apiRejection("expecting a function but got " + util.classString(fn)); + ret += " \n\ + default: \n\ + var args = new Array(len + 1); \n\ + var i = 0; \n\ + for (var i = 0; i < len; ++i) { \n\ + args[i] = arguments[i]; \n\ + } \n\ + args[i] = nodeback; \n\ + [CodeForCall] \n\ + break; \n\ + ".replace("[CodeForCall]", (shouldProxyThis + ? "ret = callback.apply(this, args);\n" + : "ret = callback.apply(receiver, args);\n")); + return ret; } - var array = new ReductionPromiseArray(promises, fn, initialValue, _each); - return array.promise(); -} -function gotAccum(accum) { - this.accum = accum; - this.array._gotAccum(accum); - var value = tryConvertToPromise(this.value, this.array._promise); - if (value instanceof Promise) { - this.array._currentCancellable = value; - return value._then(gotValue, undefined, undefined, this, undefined); - } else { - return gotValue.call(this, value); - } + var getFunctionCode = typeof callback === "string" + ? ("this != null ? 
this['"+callback+"'] : fn") + : "fn"; + var body = "'use strict'; \n\ + var ret = function (Parameters) { \n\ + 'use strict'; \n\ + var len = arguments.length; \n\ + var promise = new Promise(INTERNAL); \n\ + promise._captureStackTrace(); \n\ + var nodeback = nodebackForPromise(promise, " + multiArgs + "); \n\ + var ret; \n\ + var callback = tryCatch([GetFunctionCode]); \n\ + switch(len) { \n\ + [CodeForSwitchCase] \n\ + } \n\ + if (ret === errorObj) { \n\ + promise._rejectCallback(maybeWrapAsError(ret.e), true, true);\n\ + } \n\ + if (!promise._isFateSealed()) promise._setAsyncGuaranteed(); \n\ + return promise; \n\ + }; \n\ + notEnumerableProp(ret, '__isPromisified__', true); \n\ + return ret; \n\ + ".replace("[CodeForSwitchCase]", generateArgumentSwitchCase()) + .replace("[GetFunctionCode]", getFunctionCode); + body = body.replace("Parameters", parameterDeclaration(newParameterCount)); + return new Function("Promise", + "fn", + "receiver", + "withAppended", + "maybeWrapAsError", + "nodebackForPromise", + "tryCatch", + "errorObj", + "notEnumerableProp", + "INTERNAL", + body)( + Promise, + fn, + receiver, + withAppended, + maybeWrapAsError, + nodebackForPromise, + util.tryCatch, + util.errorObj, + util.notEnumerableProp, + INTERNAL); +}; } -function gotValue(value) { - var array = this.array; - var promise = array._promise; - var fn = tryCatch(array._fn); - promise._pushContext(); - var ret; - if (array._eachValues !== undefined) { - ret = fn.call(promise._boundValue(), value, this.index, this.length); - } else { - ret = fn.call(promise._boundValue(), - this.accum, value, this.index, this.length); +function makeNodePromisifiedClosure(callback, receiver, _, fn, __, multiArgs) { + var defaultThis = (function() {return this;})(); + var method = callback; + if (typeof method === "string") { + callback = fn; } - if (ret instanceof Promise) { - array._currentCancellable = ret; + function promisified() { + var _receiver = receiver; + if (receiver === THIS) _receiver = this; + var promise = new Promise(INTERNAL); + promise._captureStackTrace(); + var cb = typeof method === "string" && this !== defaultThis + ? this[method] : callback; + var fn = nodebackForPromise(promise, multiArgs); + try { + cb.apply(_receiver, withAppended(arguments, fn)); + } catch(e) { + promise._rejectCallback(maybeWrapAsError(e), true, true); + } + if (!promise._isFateSealed()) promise._setAsyncGuaranteed(); + return promise; } - var promiseCreated = promise._popContext(); - debug.checkForgottenReturns( - ret, - promiseCreated, - array._eachValues !== undefined ? "Promise.each" : "Promise.reduce", - promise - ); - return ret; + util.notEnumerableProp(promisified, "__isPromisified__", true); + return promisified; } -}; - - -/***/ }), -/***/ 6203: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -var util = __nccwpck_require__(7448); -var schedule; -var noAsyncScheduler = function() { - throw new Error("No async scheduler available\u000a\u000a See http://goo.gl/MqrFmX\u000a"); -}; -var NativePromise = util.getNativePromise(); -if (util.isNode && typeof MutationObserver === "undefined") { - var GlobalSetImmediate = global.setImmediate; - var ProcessNextTick = process.nextTick; - schedule = util.isRecentNode - ? 
function(fn) { GlobalSetImmediate.call(global, fn); } - : function(fn) { ProcessNextTick.call(process, fn); }; -} else if (typeof NativePromise === "function" && - typeof NativePromise.resolve === "function") { - var nativePromise = NativePromise.resolve(); - schedule = function(fn) { - nativePromise.then(fn); - }; -} else if ((typeof MutationObserver !== "undefined") && - !(typeof window !== "undefined" && - window.navigator && - (window.navigator.standalone || window.cordova))) { - schedule = (function() { - var div = document.createElement("div"); - var opts = {attributes: true}; - var toggleScheduled = false; - var div2 = document.createElement("div"); - var o2 = new MutationObserver(function() { - div.classList.toggle("foo"); - toggleScheduled = false; - }); - o2.observe(div2, opts); +var makeNodePromisified = canEvaluate + ? makeNodePromisifiedEval + : makeNodePromisifiedClosure; - var scheduleToggle = function() { - if (toggleScheduled) return; - toggleScheduled = true; - div2.classList.toggle("foo"); - }; +function promisifyAll(obj, suffix, filter, promisifier, multiArgs) { + var suffixRegexp = new RegExp(escapeIdentRegex(suffix) + "$"); + var methods = + promisifiableMethods(obj, suffix, suffixRegexp, filter); - return function schedule(fn) { - var o = new MutationObserver(function() { - o.disconnect(); - fn(); + for (var i = 0, len = methods.length; i < len; i+= 2) { + var key = methods[i]; + var fn = methods[i+1]; + var promisifiedKey = key + suffix; + if (promisifier === makeNodePromisified) { + obj[promisifiedKey] = + makeNodePromisified(key, THIS, key, fn, suffix, multiArgs); + } else { + var promisified = promisifier(fn, function() { + return makeNodePromisified(key, THIS, key, + fn, suffix, multiArgs); }); - o.observe(div, opts); - scheduleToggle(); - }; - })(); -} else if (typeof setImmediate !== "undefined") { - schedule = function (fn) { - setImmediate(fn); - }; -} else if (typeof setTimeout !== "undefined") { - schedule = function (fn) { - setTimeout(fn, 0); - }; -} else { - schedule = noAsyncScheduler; + util.notEnumerableProp(promisified, "__isPromisified__", true); + obj[promisifiedKey] = promisified; + } + } + util.toFastProperties(obj); + return obj; } -module.exports = schedule; - - -/***/ }), - -/***/ 6087: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -module.exports = - function(Promise, PromiseArray, debug) { -var PromiseInspection = Promise.PromiseInspection; -var util = __nccwpck_require__(7448); -function SettledPromiseArray(values) { - this.constructor$(values); +function promisify(callback, receiver, multiArgs) { + return makeNodePromisified(callback, receiver, undefined, + callback, null, multiArgs); } -util.inherits(SettledPromiseArray, PromiseArray); -SettledPromiseArray.prototype._promiseResolved = function (index, inspection) { - this._values[index] = inspection; - var totalResolved = ++this._totalResolved; - if (totalResolved >= this._length) { - this._resolve(this._values); - return true; +Promise.promisify = function (fn, options) { + if (typeof fn !== "function") { + throw new TypeError("expecting a function but got " + util.classString(fn)); + } + if (isPromisified(fn)) { + return fn; } - return false; + options = Object(options); + var receiver = options.context === undefined ? 
THIS : options.context; + var multiArgs = !!options.multiArgs; + var ret = promisify(fn, receiver, multiArgs); + util.copyDescriptors(fn, ret, propsFilter); + return ret; }; -SettledPromiseArray.prototype._promiseFulfilled = function (value, index) { - var ret = new PromiseInspection(); - ret._bitField = 33554432; - ret._settledValueField = value; - return this._promiseResolved(index, ret); -}; -SettledPromiseArray.prototype._promiseRejected = function (reason, index) { - var ret = new PromiseInspection(); - ret._bitField = 16777216; - ret._settledValueField = reason; - return this._promiseResolved(index, ret); -}; +Promise.promisifyAll = function (target, options) { + if (typeof target !== "function" && typeof target !== "object") { + throw new TypeError("the target of promisifyAll must be an object or a function\u000a\u000a See http://goo.gl/MqrFmX\u000a"); + } + options = Object(options); + var multiArgs = !!options.multiArgs; + var suffix = options.suffix; + if (typeof suffix !== "string") suffix = defaultSuffix; + var filter = options.filter; + if (typeof filter !== "function") filter = defaultFilter; + var promisifier = options.promisifier; + if (typeof promisifier !== "function") promisifier = makeNodePromisified; -Promise.settle = function (promises) { - debug.deprecated(".settle()", ".reflect()"); - return new SettledPromiseArray(promises).promise(); -}; + if (!util.isIdentifier(suffix)) { + throw new RangeError("suffix must be a valid identifier\u000a\u000a See http://goo.gl/MqrFmX\u000a"); + } -Promise.prototype.settle = function () { - return Promise.settle(this); + var keys = util.inheritedDataKeys(target); + for (var i = 0; i < keys.length; ++i) { + var value = target[keys[i]]; + if (keys[i] !== "constructor" && + util.isClass(value)) { + promisifyAll(value.prototype, suffix, filter, promisifier, + multiArgs); + promisifyAll(value, suffix, filter, promisifier, multiArgs); + } + } + + return promisifyAll(target, suffix, filter, promisifier, multiArgs); }; }; + /***/ }), -/***/ 1156: +/***/ 8272: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = -function(Promise, PromiseArray, apiRejection) { -var util = __nccwpck_require__(7448); -var RangeError = (__nccwpck_require__(5816).RangeError); -var AggregateError = (__nccwpck_require__(5816).AggregateError); -var isArray = util.isArray; -var CANCELLATION = {}; - +module.exports = function( + Promise, PromiseArray, tryConvertToPromise, apiRejection) { +var util = __nccwpck_require__(2587); +var isObject = util.isObject; +var es5 = __nccwpck_require__(9451); +var Es6Map; +if (typeof Map === "function") Es6Map = Map; -function SomePromiseArray(values) { - this.constructor$(values); - this._howMany = 0; - this._unwrap = false; - this._initialized = false; -} -util.inherits(SomePromiseArray, PromiseArray); +var mapToEntries = (function() { + var index = 0; + var size = 0; -SomePromiseArray.prototype._init = function () { - if (!this._initialized) { - return; - } - if (this._howMany === 0) { - this._resolve([]); - return; - } - this._init$(undefined, -5); - var isArrayResolved = isArray(this._values); - if (!this._isResolved() && - isArrayResolved && - this._howMany > this._canPossiblyFulfill()) { - this._reject(this._getRangeError(this.length())); + function extractEntry(value, key) { + this[index] = value; + this[index + size] = key; + index++; } -}; - -SomePromiseArray.prototype.init = function () { - this._initialized = true; - this._init(); -}; - -SomePromiseArray.prototype.setUnwrap = function () 
{ - this._unwrap = true; -}; -SomePromiseArray.prototype.howMany = function () { - return this._howMany; -}; + return function mapToEntries(map) { + size = map.size; + index = 0; + var ret = new Array(map.size * 2); + map.forEach(extractEntry, ret); + return ret; + }; +})(); -SomePromiseArray.prototype.setHowMany = function (count) { - this._howMany = count; +var entriesToMap = function(entries) { + var ret = new Es6Map(); + var length = entries.length / 2 | 0; + for (var i = 0; i < length; ++i) { + var key = entries[length + i]; + var value = entries[i]; + ret.set(key, value); + } + return ret; }; -SomePromiseArray.prototype._promiseFulfilled = function (value) { - this._addFulfilled(value); - if (this._fulfilled() === this.howMany()) { - this._values.length = this.howMany(); - if (this.howMany() === 1 && this._unwrap) { - this._resolve(this._values[0]); - } else { - this._resolve(this._values); +function PropertiesPromiseArray(obj) { + var isMap = false; + var entries; + if (Es6Map !== undefined && obj instanceof Es6Map) { + entries = mapToEntries(obj); + isMap = true; + } else { + var keys = es5.keys(obj); + var len = keys.length; + entries = new Array(len * 2); + for (var i = 0; i < len; ++i) { + var key = keys[i]; + entries[i] = obj[key]; + entries[i + len] = key; } - return true; } - return false; - -}; -SomePromiseArray.prototype._promiseRejected = function (reason) { - this._addRejected(reason); - return this._checkOutcome(); -}; + this.constructor$(entries); + this._isMap = isMap; + this._init$(undefined, -3); +} +util.inherits(PropertiesPromiseArray, PromiseArray); -SomePromiseArray.prototype._promiseCancelled = function () { - if (this._values instanceof Promise || this._values == null) { - return this._cancel(); - } - this._addRejected(CANCELLATION); - return this._checkOutcome(); -}; +PropertiesPromiseArray.prototype._init = function () {}; -SomePromiseArray.prototype._checkOutcome = function() { - if (this.howMany() > this._canPossiblyFulfill()) { - var e = new AggregateError(); - for (var i = this.length(); i < this._values.length; ++i) { - if (this._values[i] !== CANCELLATION) { - e.push(this._values[i]); - } - } - if (e.length > 0) { - this._reject(e); +PropertiesPromiseArray.prototype._promiseFulfilled = function (value, index) { + this._values[index] = value; + var totalResolved = ++this._totalResolved; + if (totalResolved >= this._length) { + var val; + if (this._isMap) { + val = entriesToMap(this._values); } else { - this._cancel(); + val = {}; + var keyOffset = this.length(); + for (var i = 0, len = this.length(); i < len; ++i) { + val[this._values[i + keyOffset]] = this._values[i]; + } } + this._resolve(val); return true; } return false; }; -SomePromiseArray.prototype._fulfilled = function () { - return this._totalResolved; -}; - -SomePromiseArray.prototype._rejected = function () { - return this._values.length - this.length(); -}; - -SomePromiseArray.prototype._addRejected = function (reason) { - this._values.push(reason); -}; - -SomePromiseArray.prototype._addFulfilled = function (value) { - this._values[this._totalResolved++] = value; +PropertiesPromiseArray.prototype.shouldCopyValues = function () { + return false; }; -SomePromiseArray.prototype._canPossiblyFulfill = function () { - return this.length() - this._rejected(); +PropertiesPromiseArray.prototype.getActualLength = function (len) { + return len >> 1; }; -SomePromiseArray.prototype._getRangeError = function (count) { - var message = "Input array must contain at least " + - this._howMany + " items but 
contains only " + count + " items"; - return new RangeError(message); -}; +function props(promises) { + var ret; + var castValue = tryConvertToPromise(promises); -SomePromiseArray.prototype._resolveEmptyArray = function () { - this._reject(this._getRangeError(0)); -}; + if (!isObject(castValue)) { + return apiRejection("cannot await properties of a non-object\u000a\u000a See http://goo.gl/MqrFmX\u000a"); + } else if (castValue instanceof Promise) { + ret = castValue._then( + Promise.props, undefined, undefined, undefined, undefined); + } else { + ret = new PropertiesPromiseArray(castValue).promise(); + } -function some(promises, howMany) { - if ((howMany | 0) !== howMany || howMany < 0) { - return apiRejection("expecting a positive integer\u000a\u000a See http://goo.gl/MqrFmX\u000a"); + if (castValue instanceof Promise) { + ret._propagateFrom(castValue, 2); } - var ret = new SomePromiseArray(promises); - var promise = ret.promise(); - ret.setHowMany(howMany); - ret.init(); - return promise; + return ret; } -Promise.some = function (promises, howMany) { - return some(promises, howMany); +Promise.prototype.props = function () { + return props(this); }; -Promise.prototype.some = function (howMany) { - return some(this, howMany); +Promise.props = function (promises) { + return props(promises); }; - -Promise._SomePromiseArray = SomePromiseArray; }; /***/ }), -/***/ 6653: +/***/ 956: /***/ ((module) => { -module.exports = function(Promise) { -function PromiseInspection(promise) { - if (promise !== undefined) { - promise = promise._target(); - this._bitField = promise._bitField; - this._settledValueField = promise._isFateSealed() - ? promise._settledValue() : undefined; - } - else { - this._bitField = 0; - this._settledValueField = undefined; +function arrayMove(src, srcIndex, dst, dstIndex, len) { + for (var j = 0; j < len; ++j) { + dst[j + dstIndex] = src[j + srcIndex]; + src[j + srcIndex] = void 0; } } -PromiseInspection.prototype._settledValue = function() { - return this._settledValueField; -}; - -var value = PromiseInspection.prototype.value = function () { - if (!this.isFulfilled()) { - throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\u000a\u000a See http://goo.gl/MqrFmX\u000a"); - } - return this._settledValue(); -}; - -var reason = PromiseInspection.prototype.error = -PromiseInspection.prototype.reason = function () { - if (!this.isRejected()) { - throw new TypeError("cannot get rejection reason of a non-rejected promise\u000a\u000a See http://goo.gl/MqrFmX\u000a"); - } - return this._settledValue(); -}; - -var isFulfilled = PromiseInspection.prototype.isFulfilled = function() { - return (this._bitField & 33554432) !== 0; -}; - -var isRejected = PromiseInspection.prototype.isRejected = function () { - return (this._bitField & 16777216) !== 0; -}; - -var isPending = PromiseInspection.prototype.isPending = function () { - return (this._bitField & 50397184) === 0; -}; - -var isResolved = PromiseInspection.prototype.isResolved = function () { - return (this._bitField & 50331648) !== 0; -}; - -PromiseInspection.prototype.isCancelled = function() { - return (this._bitField & 8454144) !== 0; -}; - -Promise.prototype.__isCancelled = function() { - return (this._bitField & 65536) === 65536; -}; - -Promise.prototype._isCancelled = function() { - return this._target().__isCancelled(); -}; - -Promise.prototype.isCancelled = function() { - return (this._target()._bitField & 8454144) !== 0; -}; - -Promise.prototype.isPending = function() { - return 
isPending.call(this._target()); -}; +function Queue(capacity) { + this._capacity = capacity; + this._length = 0; + this._front = 0; +} -Promise.prototype.isRejected = function() { - return isRejected.call(this._target()); +Queue.prototype._willBeOverCapacity = function (size) { + return this._capacity < size; }; -Promise.prototype.isFulfilled = function() { - return isFulfilled.call(this._target()); +Queue.prototype._pushOne = function (arg) { + var length = this.length(); + this._checkCapacity(length + 1); + var i = (this._front + length) & (this._capacity - 1); + this[i] = arg; + this._length = length + 1; }; -Promise.prototype.isResolved = function() { - return isResolved.call(this._target()); +Queue.prototype.push = function (fn, receiver, arg) { + var length = this.length() + 3; + if (this._willBeOverCapacity(length)) { + this._pushOne(fn); + this._pushOne(receiver); + this._pushOne(arg); + return; + } + var j = this._front + length - 3; + this._checkCapacity(length); + var wrapMask = this._capacity - 1; + this[(j + 0) & wrapMask] = fn; + this[(j + 1) & wrapMask] = receiver; + this[(j + 2) & wrapMask] = arg; + this._length = length; }; -Promise.prototype.value = function() { - return value.call(this._target()); -}; +Queue.prototype.shift = function () { + var front = this._front, + ret = this[front]; -Promise.prototype.reason = function() { - var target = this._target(); - target._unsetRejectionIsUnhandled(); - return reason.call(target); + this[front] = undefined; + this._front = (front + 1) & (this._capacity - 1); + this._length--; + return ret; }; -Promise.prototype._value = function() { - return this._settledValue(); +Queue.prototype.length = function () { + return this._length; }; -Promise.prototype._reason = function() { - this._unsetRejectionIsUnhandled(); - return this._settledValue(); +Queue.prototype._checkCapacity = function (size) { + if (this._capacity < size) { + this._resizeTo(this._capacity << 1); + } }; -Promise.PromiseInspection = PromiseInspection; +Queue.prototype._resizeTo = function (capacity) { + var oldCapacity = this._capacity; + this._capacity = capacity; + var front = this._front; + var length = this._length; + var moveItemsCount = (front + length) & (oldCapacity - 1); + arrayMove(this, 0, this, oldCapacity, moveItemsCount); }; - -/***/ }), - -/***/ 9787: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -module.exports = function(Promise, INTERNAL) { -var util = __nccwpck_require__(7448); -var errorObj = util.errorObj; -var isObject = util.isObject; - -function tryConvertToPromise(obj, context) { - if (isObject(obj)) { - if (obj instanceof Promise) return obj; - var then = getThen(obj); - if (then === errorObj) { - if (context) context._pushContext(); - var ret = Promise.reject(then.e); - if (context) context._popContext(); - return ret; - } else if (typeof then === "function") { - if (isAnyBluebirdPromise(obj)) { - var ret = new Promise(INTERNAL); - obj._then( - ret._fulfill, - ret._reject, - undefined, - ret, - null - ); - return ret; - } - return doThenable(obj, then, context); - } - } - return obj; -} - -function doGetThen(obj) { - return obj.then; -} - -function getThen(obj) { - try { - return doGetThen(obj); - } catch (e) { - errorObj.e = e; - return errorObj; - } -} - -var hasProp = {}.hasOwnProperty; -function isAnyBluebirdPromise(obj) { - try { - return hasProp.call(obj, "_promise0"); - } catch (e) { - return false; - } -} - -function doThenable(x, then, context) { - var promise = new Promise(INTERNAL); - var ret = promise; - 
if (context) context._pushContext(); - promise._captureStackTrace(); - if (context) context._popContext(); - var synchronous = true; - var result = util.tryCatch(then).call(x, resolve, reject); - synchronous = false; - - if (promise && result === errorObj) { - promise._rejectCallback(result.e, true, true); - promise = null; - } - - function resolve(value) { - if (!promise) return; - promise._resolveCallback(value); - promise = null; - } - - function reject(reason) { - if (!promise) return; - promise._rejectCallback(reason, synchronous, true); - promise = null; - } - return ret; -} - -return tryConvertToPromise; -}; +module.exports = Queue; /***/ }), -/***/ 2114: +/***/ 9133: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = function(Promise, INTERNAL, debug) { -var util = __nccwpck_require__(7448); -var TimeoutError = Promise.TimeoutError; - -function HandleWrapper(handle) { - this.handle = handle; -} - -HandleWrapper.prototype._resultCancelled = function() { - clearTimeout(this.handle); -}; +module.exports = function( + Promise, INTERNAL, tryConvertToPromise, apiRejection) { +var util = __nccwpck_require__(2587); -var afterValue = function(value) { return delay(+this).thenReturn(value); }; -var delay = Promise.delay = function (ms, value) { - var ret; - var handle; - if (value !== undefined) { - ret = Promise.resolve(value) - ._then(afterValue, null, null, ms, undefined); - if (debug.cancellation() && value instanceof Promise) { - ret._setOnCancel(value); - } - } else { - ret = new Promise(INTERNAL); - handle = setTimeout(function() { ret._fulfill(); }, +ms); - if (debug.cancellation()) { - ret._setOnCancel(new HandleWrapper(handle)); - } - ret._captureStackTrace(); - } - ret._setAsyncGuaranteed(); - return ret; +var raceLater = function (promise) { + return promise.then(function(array) { + return race(array, promise); + }); }; -Promise.prototype.delay = function (ms) { - return delay(ms, this); -}; +function race(promises, parent) { + var maybePromise = tryConvertToPromise(promises); -var afterTimeout = function (promise, message, parent) { - var err; - if (typeof message !== "string") { - if (message instanceof Error) { - err = message; - } else { - err = new TimeoutError("operation timed out"); - } + if (maybePromise instanceof Promise) { + return raceLater(maybePromise); } else { - err = new TimeoutError(message); + promises = util.asArray(promises); + if (promises === null) + return apiRejection("expecting an array or an iterable object but got " + util.classString(promises)); } - util.markAsOriginatingFromRejection(err); - promise._attachExtraTrace(err); - promise._reject(err); - if (parent != null) { - parent.cancel(); + var ret = new Promise(INTERNAL); + if (parent !== undefined) { + ret._propagateFrom(parent, 3); } -}; - -function successClear(value) { - clearTimeout(this.handle); - return value; -} - -function failureClear(reason) { - clearTimeout(this.handle); - throw reason; -} - -Promise.prototype.timeout = function (ms, message) { - ms = +ms; - var ret, parent; + var fulfill = ret._fulfill; + var reject = ret._reject; + for (var i = 0, len = promises.length; i < len; ++i) { + var val = promises[i]; - var handleWrapper = new HandleWrapper(setTimeout(function timeoutTimeout() { - if (ret.isPending()) { - afterTimeout(ret, message, parent); + if (val === undefined && !(i in promises)) { + continue; } - }, ms)); - if (debug.cancellation()) { - parent = this.then(); - ret = parent._then(successClear, failureClear, - undefined, handleWrapper, 
undefined); - ret._setOnCancel(handleWrapper); - } else { - ret = this._then(successClear, failureClear, - undefined, handleWrapper, undefined); + Promise.cast(val)._then(fulfill, reject, undefined, ret, null); } - return ret; +} + +Promise.race = function (promises) { + return race(promises, undefined); +}; + +Promise.prototype.race = function () { + return race(this, undefined); }; }; @@ -16274,1535 +12265,1386 @@ Promise.prototype.timeout = function (ms, message) { /***/ }), -/***/ 880: +/***/ 5372: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = function (Promise, apiRejection, tryConvertToPromise, - createContext, INTERNAL, debug) { - var util = __nccwpck_require__(7448); - var TypeError = (__nccwpck_require__(5816).TypeError); - var inherits = (__nccwpck_require__(7448).inherits); - var errorObj = util.errorObj; - var tryCatch = util.tryCatch; - var NULL = {}; +module.exports = function(Promise, + PromiseArray, + apiRejection, + tryConvertToPromise, + INTERNAL, + debug) { +var getDomain = Promise._getDomain; +var util = __nccwpck_require__(2587); +var tryCatch = util.tryCatch; - function thrower(e) { - setTimeout(function(){throw e;}, 0); +function ReductionPromiseArray(promises, fn, initialValue, _each) { + this.constructor$(promises); + var domain = getDomain(); + this._fn = domain === null ? fn : util.domainBind(domain, fn); + if (initialValue !== undefined) { + initialValue = Promise.resolve(initialValue); + initialValue._attachCancellationCallback(this); } - - function castPreservingDisposable(thenable) { - var maybePromise = tryConvertToPromise(thenable); - if (maybePromise !== thenable && - typeof thenable._isDisposable === "function" && - typeof thenable._getDisposer === "function" && - thenable._isDisposable()) { - maybePromise._setDisposable(thenable._getDisposer()); - } - return maybePromise; + this._initialValue = initialValue; + this._currentCancellable = null; + if(_each === INTERNAL) { + this._eachValues = Array(this._length); + } else if (_each === 0) { + this._eachValues = null; + } else { + this._eachValues = undefined; } - function dispose(resources, inspection) { - var i = 0; - var len = resources.length; - var ret = new Promise(INTERNAL); - function iterator() { - if (i >= len) return ret._fulfill(); - var maybePromise = castPreservingDisposable(resources[i++]); - if (maybePromise instanceof Promise && - maybePromise._isDisposable()) { - try { - maybePromise = tryConvertToPromise( - maybePromise._getDisposer().tryDispose(inspection), - resources.promise); - } catch (e) { - return thrower(e); - } - if (maybePromise instanceof Promise) { - return maybePromise._then(iterator, thrower, - null, null, null); - } - } - iterator(); - } - iterator(); - return ret; + this._promise._captureStackTrace(); + this._init$(undefined, -5); +} +util.inherits(ReductionPromiseArray, PromiseArray); + +ReductionPromiseArray.prototype._gotAccum = function(accum) { + if (this._eachValues !== undefined && + this._eachValues !== null && + accum !== INTERNAL) { + this._eachValues.push(accum); } +}; - function Disposer(data, promise, context) { - this._data = data; - this._promise = promise; - this._context = context; +ReductionPromiseArray.prototype._eachComplete = function(value) { + if (this._eachValues !== null) { + this._eachValues.push(value); } + return this._eachValues; +}; - Disposer.prototype.data = function () { - return this._data; - }; +ReductionPromiseArray.prototype._init = function() {}; - Disposer.prototype.promise = function () { - 
return this._promise; - }; +ReductionPromiseArray.prototype._resolveEmptyArray = function() { + this._resolve(this._eachValues !== undefined ? this._eachValues + : this._initialValue); +}; - Disposer.prototype.resource = function () { - if (this.promise().isFulfilled()) { - return this.promise().value(); - } - return NULL; - }; +ReductionPromiseArray.prototype.shouldCopyValues = function () { + return false; +}; - Disposer.prototype.tryDispose = function(inspection) { - var resource = this.resource(); - var context = this._context; - if (context !== undefined) context._pushContext(); - var ret = resource !== NULL - ? this.doDispose(resource, inspection) : null; - if (context !== undefined) context._popContext(); - this._promise._unsetDisposable(); - this._data = null; - return ret; - }; +ReductionPromiseArray.prototype._resolve = function(value) { + this._promise._resolveCallback(value); + this._values = null; +}; - Disposer.isDisposer = function (d) { - return (d != null && - typeof d.resource === "function" && - typeof d.tryDispose === "function"); - }; +ReductionPromiseArray.prototype._resultCancelled = function(sender) { + if (sender === this._initialValue) return this._cancel(); + if (this._isResolved()) return; + this._resultCancelled$(); + if (this._currentCancellable instanceof Promise) { + this._currentCancellable.cancel(); + } + if (this._initialValue instanceof Promise) { + this._initialValue.cancel(); + } +}; - function FunctionDisposer(fn, promise, context) { - this.constructor$(fn, promise, context); +ReductionPromiseArray.prototype._iterate = function (values) { + this._values = values; + var value; + var i; + var length = values.length; + if (this._initialValue !== undefined) { + value = this._initialValue; + i = 0; + } else { + value = Promise.resolve(values[0]); + i = 1; } - inherits(FunctionDisposer, Disposer); - FunctionDisposer.prototype.doDispose = function (resource, inspection) { - var fn = this.data(); - return fn.call(resource, resource, inspection); - }; + this._currentCancellable = value; - function maybeUnwrapDisposer(value) { - if (Disposer.isDisposer(value)) { - this.resources[this.index]._setDisposable(value); - return value.promise(); + if (!value.isRejected()) { + for (; i < length; ++i) { + var ctx = { + accum: null, + value: values[i], + index: i, + length: length, + array: this + }; + value = value._then(gotAccum, undefined, undefined, ctx, undefined); } - return value; } - function ResourceList(length) { - this.length = length; - this.promise = null; - this[length-1] = null; + if (this._eachValues !== undefined) { + value = value + ._then(this._eachComplete, undefined, undefined, this, undefined); } + value._then(completed, completed, undefined, value, this); +}; - ResourceList.prototype._resultCancelled = function() { - var len = this.length; - for (var i = 0; i < len; ++i) { - var item = this[i]; - if (item instanceof Promise) { - item.cancel(); - } - } - }; +Promise.prototype.reduce = function (fn, initialValue) { + return reduce(this, fn, initialValue, null); +}; - Promise.using = function () { - var len = arguments.length; - if (len < 2) return apiRejection( - "you must pass at least 2 arguments to Promise.using"); - var fn = arguments[len - 1]; - if (typeof fn !== "function") { - return apiRejection("expecting a function but got " + util.classString(fn)); - } - var input; - var spreadArgs = true; - if (len === 2 && Array.isArray(arguments[0])) { - input = arguments[0]; - len = input.length; - spreadArgs = false; - } else { - input = arguments; - 
len--; - } - var resources = new ResourceList(len); - for (var i = 0; i < len; ++i) { - var resource = input[i]; - if (Disposer.isDisposer(resource)) { - var disposer = resource; - resource = resource.promise(); - resource._setDisposable(disposer); - } else { - var maybePromise = tryConvertToPromise(resource); - if (maybePromise instanceof Promise) { - resource = - maybePromise._then(maybeUnwrapDisposer, null, null, { - resources: resources, - index: i - }, undefined); - } - } - resources[i] = resource; - } +Promise.reduce = function (promises, fn, initialValue, _each) { + return reduce(promises, fn, initialValue, _each); +}; - var reflectedResources = new Array(resources.length); - for (var i = 0; i < reflectedResources.length; ++i) { - reflectedResources[i] = Promise.resolve(resources[i]).reflect(); - } +function completed(valueOrReason, array) { + if (this.isFulfilled()) { + array._resolve(valueOrReason); + } else { + array._reject(valueOrReason); + } +} - var resultPromise = Promise.all(reflectedResources) - .then(function(inspections) { - for (var i = 0; i < inspections.length; ++i) { - var inspection = inspections[i]; - if (inspection.isRejected()) { - errorObj.e = inspection.error(); - return errorObj; - } else if (!inspection.isFulfilled()) { - resultPromise.cancel(); - return; - } - inspections[i] = inspection.value(); - } - promise._pushContext(); +function reduce(promises, fn, initialValue, _each) { + if (typeof fn !== "function") { + return apiRejection("expecting a function but got " + util.classString(fn)); + } + var array = new ReductionPromiseArray(promises, fn, initialValue, _each); + return array.promise(); +} - fn = tryCatch(fn); - var ret = spreadArgs - ? fn.apply(undefined, inspections) : fn(inspections); - var promiseCreated = promise._popContext(); - debug.checkForgottenReturns( - ret, promiseCreated, "Promise.using", promise); - return ret; - }); +function gotAccum(accum) { + this.accum = accum; + this.array._gotAccum(accum); + var value = tryConvertToPromise(this.value, this.array._promise); + if (value instanceof Promise) { + this.array._currentCancellable = value; + return value._then(gotValue, undefined, undefined, this, undefined); + } else { + return gotValue.call(this, value); + } +} - var promise = resultPromise.lastly(function() { - var inspection = new Promise.PromiseInspection(resultPromise); - return dispose(resources, inspection); - }); - resources.promise = promise; - promise._setOnCancel(resources); - return promise; - }; +function gotValue(value) { + var array = this.array; + var promise = array._promise; + var fn = tryCatch(array._fn); + promise._pushContext(); + var ret; + if (array._eachValues !== undefined) { + ret = fn.call(promise._boundValue(), value, this.index, this.length); + } else { + ret = fn.call(promise._boundValue(), + this.accum, value, this.index, this.length); + } + if (ret instanceof Promise) { + array._currentCancellable = ret; + } + var promiseCreated = promise._popContext(); + debug.checkForgottenReturns( + ret, + promiseCreated, + array._eachValues !== undefined ? 
"Promise.each" : "Promise.reduce", + promise + ); + return ret; +} +}; - Promise.prototype._setDisposable = function (disposer) { - this._bitField = this._bitField | 131072; - this._disposer = disposer; - }; - Promise.prototype._isDisposable = function () { - return (this._bitField & 131072) > 0; - }; +/***/ }), - Promise.prototype._getDisposer = function () { - return this._disposer; - }; +/***/ 8236: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - Promise.prototype._unsetDisposable = function () { - this._bitField = this._bitField & (~131072); - this._disposer = undefined; - }; - Promise.prototype.disposer = function (fn) { - if (typeof fn === "function") { - return new FunctionDisposer(fn, this, createContext()); - } - throw new TypeError(); +var util = __nccwpck_require__(2587); +var schedule; +var noAsyncScheduler = function() { + throw new Error("No async scheduler available\u000a\u000a See http://goo.gl/MqrFmX\u000a"); +}; +var NativePromise = util.getNativePromise(); +if (util.isNode && typeof MutationObserver === "undefined") { + var GlobalSetImmediate = global.setImmediate; + var ProcessNextTick = process.nextTick; + schedule = util.isRecentNode + ? function(fn) { GlobalSetImmediate.call(global, fn); } + : function(fn) { ProcessNextTick.call(process, fn); }; +} else if (typeof NativePromise === "function" && + typeof NativePromise.resolve === "function") { + var nativePromise = NativePromise.resolve(); + schedule = function(fn) { + nativePromise.then(fn); }; +} else if ((typeof MutationObserver !== "undefined") && + !(typeof window !== "undefined" && + window.navigator && + (window.navigator.standalone || window.cordova))) { + schedule = (function() { + var div = document.createElement("div"); + var opts = {attributes: true}; + var toggleScheduled = false; + var div2 = document.createElement("div"); + var o2 = new MutationObserver(function() { + div.classList.toggle("foo"); + toggleScheduled = false; + }); + o2.observe(div2, opts); -}; + var scheduleToggle = function() { + if (toggleScheduled) return; + toggleScheduled = true; + div2.classList.toggle("foo"); + }; + + return function schedule(fn) { + var o = new MutationObserver(function() { + o.disconnect(); + fn(); + }); + o.observe(div, opts); + scheduleToggle(); + }; + })(); +} else if (typeof setImmediate !== "undefined") { + schedule = function (fn) { + setImmediate(fn); + }; +} else if (typeof setTimeout !== "undefined") { + schedule = function (fn) { + setTimeout(fn, 0); + }; +} else { + schedule = noAsyncScheduler; +} +module.exports = schedule; /***/ }), -/***/ 7448: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - +/***/ 1012: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var es5 = __nccwpck_require__(3062); -var canEvaluate = typeof navigator == "undefined"; -var errorObj = {e: {}}; -var tryCatchTarget; -var globalObject = typeof self !== "undefined" ? self : - typeof window !== "undefined" ? window : - typeof global !== "undefined" ? global : - this !== undefined ? 
this : null; +module.exports = + function(Promise, PromiseArray, debug) { +var PromiseInspection = Promise.PromiseInspection; +var util = __nccwpck_require__(2587); -function tryCatcher() { - try { - var target = tryCatchTarget; - tryCatchTarget = null; - return target.apply(this, arguments); - } catch (e) { - errorObj.e = e; - return errorObj; - } -} -function tryCatch(fn) { - tryCatchTarget = fn; - return tryCatcher; +function SettledPromiseArray(values) { + this.constructor$(values); } +util.inherits(SettledPromiseArray, PromiseArray); -var inherits = function(Child, Parent) { - var hasProp = {}.hasOwnProperty; - - function T() { - this.constructor = Child; - this.constructor$ = Parent; - for (var propertyName in Parent.prototype) { - if (hasProp.call(Parent.prototype, propertyName) && - propertyName.charAt(propertyName.length-1) !== "$" - ) { - this[propertyName + "$"] = Parent.prototype[propertyName]; - } - } +SettledPromiseArray.prototype._promiseResolved = function (index, inspection) { + this._values[index] = inspection; + var totalResolved = ++this._totalResolved; + if (totalResolved >= this._length) { + this._resolve(this._values); + return true; } - T.prototype = Parent.prototype; - Child.prototype = new T(); - return Child.prototype; + return false; }; +SettledPromiseArray.prototype._promiseFulfilled = function (value, index) { + var ret = new PromiseInspection(); + ret._bitField = 33554432; + ret._settledValueField = value; + return this._promiseResolved(index, ret); +}; +SettledPromiseArray.prototype._promiseRejected = function (reason, index) { + var ret = new PromiseInspection(); + ret._bitField = 16777216; + ret._settledValueField = reason; + return this._promiseResolved(index, ret); +}; -function isPrimitive(val) { - return val == null || val === true || val === false || - typeof val === "string" || typeof val === "number"; +Promise.settle = function (promises) { + debug.deprecated(".settle()", ".reflect()"); + return new SettledPromiseArray(promises).promise(); +}; -} +Promise.prototype.settle = function () { + return Promise.settle(this); +}; +}; -function isObject(value) { - return typeof value === "function" || - typeof value === "object" && value !== null; -} -function maybeWrapAsError(maybeError) { - if (!isPrimitive(maybeError)) return maybeError; +/***/ }), - return new Error(safeToString(maybeError)); -} +/***/ 3337: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function withAppended(target, appendee) { - var len = target.length; - var ret = new Array(len + 1); - var i; - for (i = 0; i < len; ++i) { - ret[i] = target[i]; - } - ret[i] = appendee; - return ret; -} -function getDataPropertyOrDefault(obj, key, defaultValue) { - if (es5.isES5) { - var desc = Object.getOwnPropertyDescriptor(obj, key); +module.exports = +function(Promise, PromiseArray, apiRejection) { +var util = __nccwpck_require__(2587); +var RangeError = (__nccwpck_require__(5514).RangeError); +var AggregateError = (__nccwpck_require__(5514).AggregateError); +var isArray = util.isArray; +var CANCELLATION = {}; - if (desc != null) { - return desc.get == null && desc.set == null - ? desc.value - : defaultValue; - } - } else { - return {}.hasOwnProperty.call(obj, key) ? 
obj[key] : undefined; - } -} -function notEnumerableProp(obj, name, value) { - if (isPrimitive(obj)) return obj; - var descriptor = { - value: value, - configurable: true, - enumerable: false, - writable: true - }; - es5.defineProperty(obj, name, descriptor); - return obj; +function SomePromiseArray(values) { + this.constructor$(values); + this._howMany = 0; + this._unwrap = false; + this._initialized = false; } +util.inherits(SomePromiseArray, PromiseArray); -function thrower(r) { - throw r; -} +SomePromiseArray.prototype._init = function () { + if (!this._initialized) { + return; + } + if (this._howMany === 0) { + this._resolve([]); + return; + } + this._init$(undefined, -5); + var isArrayResolved = isArray(this._values); + if (!this._isResolved() && + isArrayResolved && + this._howMany > this._canPossiblyFulfill()) { + this._reject(this._getRangeError(this.length())); + } +}; -var inheritedDataKeys = (function() { - var excludedPrototypes = [ - Array.prototype, - Object.prototype, - Function.prototype - ]; +SomePromiseArray.prototype.init = function () { + this._initialized = true; + this._init(); +}; - var isExcludedProto = function(val) { - for (var i = 0; i < excludedPrototypes.length; ++i) { - if (excludedPrototypes[i] === val) { - return true; - } - } - return false; - }; +SomePromiseArray.prototype.setUnwrap = function () { + this._unwrap = true; +}; - if (es5.isES5) { - var getKeys = Object.getOwnPropertyNames; - return function(obj) { - var ret = []; - var visitedKeys = Object.create(null); - while (obj != null && !isExcludedProto(obj)) { - var keys; - try { - keys = getKeys(obj); - } catch (e) { - return ret; - } - for (var i = 0; i < keys.length; ++i) { - var key = keys[i]; - if (visitedKeys[key]) continue; - visitedKeys[key] = true; - var desc = Object.getOwnPropertyDescriptor(obj, key); - if (desc != null && desc.get == null && desc.set == null) { - ret.push(key); - } - } - obj = es5.getPrototypeOf(obj); - } - return ret; - }; - } else { - var hasProp = {}.hasOwnProperty; - return function(obj) { - if (isExcludedProto(obj)) return []; - var ret = []; +SomePromiseArray.prototype.howMany = function () { + return this._howMany; +}; - /*jshint forin:false */ - enumeration: for (var key in obj) { - if (hasProp.call(obj, key)) { - ret.push(key); - } else { - for (var i = 0; i < excludedPrototypes.length; ++i) { - if (hasProp.call(excludedPrototypes[i], key)) { - continue enumeration; - } - } - ret.push(key); - } - } - return ret; - }; - } +SomePromiseArray.prototype.setHowMany = function (count) { + this._howMany = count; +}; -})(); +SomePromiseArray.prototype._promiseFulfilled = function (value) { + this._addFulfilled(value); + if (this._fulfilled() === this.howMany()) { + this._values.length = this.howMany(); + if (this.howMany() === 1 && this._unwrap) { + this._resolve(this._values[0]); + } else { + this._resolve(this._values); + } + return true; + } + return false; -var thisAssignmentPattern = /this\s*\.\s*\S+\s*=/; -function isClass(fn) { - try { - if (typeof fn === "function") { - var keys = es5.names(fn.prototype); +}; +SomePromiseArray.prototype._promiseRejected = function (reason) { + this._addRejected(reason); + return this._checkOutcome(); +}; - var hasMethods = es5.isES5 && keys.length > 1; - var hasMethodsOtherThanConstructor = keys.length > 0 && - !(keys.length === 1 && keys[0] === "constructor"); - var hasThisAssignmentAndStaticMethods = - thisAssignmentPattern.test(fn + "") && es5.names(fn).length > 0; +SomePromiseArray.prototype._promiseCancelled = function () { + 
if (this._values instanceof Promise || this._values == null) { + return this._cancel(); + } + this._addRejected(CANCELLATION); + return this._checkOutcome(); +}; - if (hasMethods || hasMethodsOtherThanConstructor || - hasThisAssignmentAndStaticMethods) { - return true; +SomePromiseArray.prototype._checkOutcome = function() { + if (this.howMany() > this._canPossiblyFulfill()) { + var e = new AggregateError(); + for (var i = this.length(); i < this._values.length; ++i) { + if (this._values[i] !== CANCELLATION) { + e.push(this._values[i]); } } - return false; - } catch (e) { - return false; + if (e.length > 0) { + this._reject(e); + } else { + this._cancel(); + } + return true; } -} - -function toFastProperties(obj) { - /*jshint -W027,-W055,-W031*/ - function FakeConstructor() {} - FakeConstructor.prototype = obj; - var l = 8; - while (l--) new FakeConstructor(); - return obj; - eval(obj); -} + return false; +}; -var rident = /^[a-z$_][a-z$_0-9]*$/i; -function isIdentifier(str) { - return rident.test(str); -} +SomePromiseArray.prototype._fulfilled = function () { + return this._totalResolved; +}; -function filledRange(count, prefix, suffix) { - var ret = new Array(count); - for(var i = 0; i < count; ++i) { - ret[i] = prefix + i + suffix; - } - return ret; -} +SomePromiseArray.prototype._rejected = function () { + return this._values.length - this.length(); +}; -function safeToString(obj) { - try { - return obj + ""; - } catch (e) { - return "[no string representation]"; - } -} +SomePromiseArray.prototype._addRejected = function (reason) { + this._values.push(reason); +}; -function isError(obj) { - return obj !== null && - typeof obj === "object" && - typeof obj.message === "string" && - typeof obj.name === "string"; -} +SomePromiseArray.prototype._addFulfilled = function (value) { + this._values[this._totalResolved++] = value; +}; -function markAsOriginatingFromRejection(e) { - try { - notEnumerableProp(e, "isOperational", true); - } - catch(ignore) {} -} +SomePromiseArray.prototype._canPossiblyFulfill = function () { + return this.length() - this._rejected(); +}; -function originatesFromRejection(e) { - if (e == null) return false; - return ((e instanceof Error["__BluebirdErrorTypes__"].OperationalError) || - e["isOperational"] === true); -} +SomePromiseArray.prototype._getRangeError = function (count) { + var message = "Input array must contain at least " + + this._howMany + " items but contains only " + count + " items"; + return new RangeError(message); +}; -function canAttachTrace(obj) { - return isError(obj) && es5.propertyIsWritable(obj, "stack"); -} +SomePromiseArray.prototype._resolveEmptyArray = function () { + this._reject(this._getRangeError(0)); +}; -var ensureErrorObject = (function() { - if (!("stack" in new Error())) { - return function(value) { - if (canAttachTrace(value)) return value; - try {throw new Error(safeToString(value));} - catch(err) {return err;} - }; - } else { - return function(value) { - if (canAttachTrace(value)) return value; - return new Error(safeToString(value)); - }; +function some(promises, howMany) { + if ((howMany | 0) !== howMany || howMany < 0) { + return apiRejection("expecting a positive integer\u000a\u000a See http://goo.gl/MqrFmX\u000a"); } -})(); - -function classString(obj) { - return {}.toString.call(obj); + var ret = new SomePromiseArray(promises); + var promise = ret.promise(); + ret.setHowMany(howMany); + ret.init(); + return promise; } -function copyDescriptors(from, to, filter) { - var keys = es5.names(from); - for (var i = 0; i < 
keys.length; ++i) { - var key = keys[i]; - if (filter(key)) { - try { - es5.defineProperty(to, key, es5.getDescriptor(from, key)); - } catch (ignore) {} - } - } -} +Promise.some = function (promises, howMany) { + return some(promises, howMany); +}; -var asArray = function(v) { - if (es5.isArray(v)) { - return v; - } - return null; +Promise.prototype.some = function (howMany) { + return some(this, howMany); }; -if (typeof Symbol !== "undefined" && Symbol.iterator) { - var ArrayFrom = typeof Array.from === "function" ? function(v) { - return Array.from(v); - } : function(v) { - var ret = []; - var it = v[Symbol.iterator](); - var itResult; - while (!((itResult = it.next()).done)) { - ret.push(itResult.value); - } - return ret; - }; +Promise._SomePromiseArray = SomePromiseArray; +}; - asArray = function(v) { - if (es5.isArray(v)) { - return v; - } else if (v != null && typeof v[Symbol.iterator] === "function") { - return ArrayFrom(v); - } - return null; - }; -} -var isNode = typeof process !== "undefined" && - classString(process).toLowerCase() === "[object process]"; +/***/ }), -var hasEnvVariables = typeof process !== "undefined" && - typeof process.env !== "undefined"; +/***/ 4639: +/***/ ((module) => { -function env(key) { - return hasEnvVariables ? process.env[key] : undefined; -} -function getNativePromise() { - if (typeof Promise === "function") { - try { - var promise = new Promise(function(){}); - if ({}.toString.call(promise) === "[object Promise]") { - return Promise; - } - } catch (e) {} +module.exports = function(Promise) { +function PromiseInspection(promise) { + if (promise !== undefined) { + promise = promise._target(); + this._bitField = promise._bitField; + this._settledValueField = promise._isFateSealed() + ? promise._settledValue() : undefined; + } + else { + this._bitField = 0; + this._settledValueField = undefined; } } -function domainBind(self, cb) { - return self.bind(cb); -} +PromiseInspection.prototype._settledValue = function() { + return this._settledValueField; +}; -var ret = { - isClass: isClass, - isIdentifier: isIdentifier, - inheritedDataKeys: inheritedDataKeys, - getDataPropertyOrDefault: getDataPropertyOrDefault, - thrower: thrower, - isArray: es5.isArray, - asArray: asArray, - notEnumerableProp: notEnumerableProp, - isPrimitive: isPrimitive, - isObject: isObject, - isError: isError, - canEvaluate: canEvaluate, - errorObj: errorObj, - tryCatch: tryCatch, - inherits: inherits, - withAppended: withAppended, - maybeWrapAsError: maybeWrapAsError, - toFastProperties: toFastProperties, - filledRange: filledRange, - toString: safeToString, - canAttachTrace: canAttachTrace, - ensureErrorObject: ensureErrorObject, - originatesFromRejection: originatesFromRejection, - markAsOriginatingFromRejection: markAsOriginatingFromRejection, - classString: classString, - copyDescriptors: copyDescriptors, - hasDevTools: typeof chrome !== "undefined" && chrome && - typeof chrome.loadTimes === "function", - isNode: isNode, - hasEnvVariables: hasEnvVariables, - env: env, - global: globalObject, - getNativePromise: getNativePromise, - domainBind: domainBind +var value = PromiseInspection.prototype.value = function () { + if (!this.isFulfilled()) { + throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\u000a\u000a See http://goo.gl/MqrFmX\u000a"); + } + return this._settledValue(); }; -ret.isRecentNode = ret.isNode && (function() { - var version = process.versions.node.split(".").map(Number); - return (version[0] === 0 && version[1] > 10) || (version[0] > 0); 
-})(); -if (ret.isNode) ret.toFastProperties(process); +var reason = PromiseInspection.prototype.error = +PromiseInspection.prototype.reason = function () { + if (!this.isRejected()) { + throw new TypeError("cannot get rejection reason of a non-rejected promise\u000a\u000a See http://goo.gl/MqrFmX\u000a"); + } + return this._settledValue(); +}; -try {throw new Error(); } catch (e) {ret.lastLineError = e;} -module.exports = ret; +var isFulfilled = PromiseInspection.prototype.isFulfilled = function() { + return (this._bitField & 33554432) !== 0; +}; +var isRejected = PromiseInspection.prototype.isRejected = function () { + return (this._bitField & 16777216) !== 0; +}; -/***/ }), +var isPending = PromiseInspection.prototype.isPending = function () { + return (this._bitField & 50397184) === 0; +}; -/***/ 3717: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var isResolved = PromiseInspection.prototype.isResolved = function () { + return (this._bitField & 50331648) !== 0; +}; -var concatMap = __nccwpck_require__(6891); -var balanced = __nccwpck_require__(9417); +PromiseInspection.prototype.isCancelled = function() { + return (this._bitField & 8454144) !== 0; +}; -module.exports = expandTop; +Promise.prototype.__isCancelled = function() { + return (this._bitField & 65536) === 65536; +}; -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; +Promise.prototype._isCancelled = function() { + return this._target().__isCancelled(); +}; -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); -} +Promise.prototype.isCancelled = function() { + return (this._target()._bitField & 8454144) !== 0; +}; -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} +Promise.prototype.isPending = function() { + return isPending.call(this._target()); +}; -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} +Promise.prototype.isRejected = function() { + return isRejected.call(this._target()); +}; +Promise.prototype.isFulfilled = function() { + return isFulfilled.call(this._target()); +}; -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; +Promise.prototype.isResolved = function() { + return isResolved.call(this._target()); +}; - var parts = []; - var m = balanced('{', '}', str); +Promise.prototype.value = function() { + return value.call(this._target()); +}; - if (!m) - return str.split(','); +Promise.prototype.reason = function() { + var target = this._target(); + target._unsetRejectionIsUnhandled(); + return reason.call(target); +}; - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); +Promise.prototype._value = function() { + return this._settledValue(); +}; - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } +Promise.prototype._reason = function() { + 
this._unsetRejectionIsUnhandled(); + return this._settledValue(); +}; - parts.push.apply(parts, p); +Promise.PromiseInspection = PromiseInspection; +}; - return parts; -} -function expandTop(str) { - if (!str) - return []; +/***/ }), - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. - // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } +/***/ 3947: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return expand(escapeBraces(str), true).map(unescapeBraces); -} -function identity(e) { - return e; -} +module.exports = function(Promise, INTERNAL) { +var util = __nccwpck_require__(2587); +var errorObj = util.errorObj; +var isObject = util.isObject; -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); +function tryConvertToPromise(obj, context) { + if (isObject(obj)) { + if (obj instanceof Promise) return obj; + var then = getThen(obj); + if (then === errorObj) { + if (context) context._pushContext(); + var ret = Promise.reject(then.e); + if (context) context._popContext(); + return ret; + } else if (typeof then === "function") { + if (isAnyBluebirdPromise(obj)) { + var ret = new Promise(INTERNAL); + obj._then( + ret._fulfill, + ret._reject, + undefined, + ret, + null + ); + return ret; + } + return doThenable(obj, then, context); + } + } + return obj; } -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; +function doGetThen(obj) { + return obj.then; } -function expand(str, isTop) { - var expansions = []; - - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; - - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); +function getThen(obj) { + try { + return doGetThen(obj); + } catch (e) { + errorObj.e = e; + return errorObj; } - return [str]; - } +} - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } +var hasProp = {}.hasOwnProperty; +function isAnyBluebirdPromise(obj) { + try { + return hasProp.call(obj, "_promise0"); + } catch (e) { + return false; } - } - - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. - - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? 
expand(m.post, false) - : ['']; +} - var N; +function doThenable(x, then, context) { + var promise = new Promise(INTERNAL); + var ret = promise; + if (context) context._pushContext(); + promise._captureStackTrace(); + if (context) context._popContext(); + var synchronous = true; + var result = util.tryCatch(then).call(x, resolve, reject); + synchronous = false; - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; + if (promise && result === errorObj) { + promise._rejectCallback(result.e, true, true); + promise = null; } - var pad = n.some(isPadded); - - N = []; - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); + function resolve(value) { + if (!promise) return; + promise._resolveCallback(value); + promise = null; } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); + function reject(reason) { + if (!promise) return; + promise._rejectCallback(reason, synchronous, true); + promise = null; } - } - - return expansions; + return ret; } +return tryConvertToPromise; +}; /***/ }), -/***/ 7329: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - - -var initBuffer = __nccwpck_require__(3478); - -if (!Buffer.prototype.indexOf) { - Buffer.prototype.indexOf = function (value, offset) { - offset = offset || 0; - - // Always wrap the input as a Buffer so that this method will support any - // data type such as array octet, string or buffer. - if (typeof value === "string" || value instanceof String) { - value = initBuffer(value); - } else if (typeof value === "number" || value instanceof Number) { - value = initBuffer([ value ]); - } - - var len = value.length; +/***/ 3651: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - for (var i = offset; i <= this.length - len; i++) { - var mismatch = false; - for (var j = 0; j < len; j++) { - if (this[i + j] != value[j]) { - mismatch = true; - break; - } - } - if (!mismatch) { - return i; - } - } +module.exports = function(Promise, INTERNAL, debug) { +var util = __nccwpck_require__(2587); +var TimeoutError = Promise.TimeoutError; - return -1; - }; +function HandleWrapper(handle) { + this.handle = handle; } -function bufferLastIndexOf (value, offset) { +HandleWrapper.prototype._resultCancelled = function() { + clearTimeout(this.handle); +}; - // Always wrap the input as a Buffer so that this method will support any - // data type such as array octet, string or buffer. 
- if (typeof value === "string" || value instanceof String) { - value = initBuffer(value); - } else if (typeof value === "number" || value instanceof Number) { - value = initBuffer([ value ]); +var afterValue = function(value) { return delay(+this).thenReturn(value); }; +var delay = Promise.delay = function (ms, value) { + var ret; + var handle; + if (value !== undefined) { + ret = Promise.resolve(value) + ._then(afterValue, null, null, ms, undefined); + if (debug.cancellation() && value instanceof Promise) { + ret._setOnCancel(value); + } + } else { + ret = new Promise(INTERNAL); + handle = setTimeout(function() { ret._fulfill(); }, +ms); + if (debug.cancellation()) { + ret._setOnCancel(new HandleWrapper(handle)); + } + ret._captureStackTrace(); } + ret._setAsyncGuaranteed(); + return ret; +}; - var len = value.length; - offset = offset || this.length - len; +Promise.prototype.delay = function (ms) { + return delay(ms, this); +}; - for (var i = offset; i >= 0; i--) { - var mismatch = false; - for (var j = 0; j < len; j++) { - if (this[i + j] != value[j]) { - mismatch = true; - break; - } +var afterTimeout = function (promise, message, parent) { + var err; + if (typeof message !== "string") { + if (message instanceof Error) { + err = message; + } else { + err = new TimeoutError("operation timed out"); } + } else { + err = new TimeoutError(message); + } + util.markAsOriginatingFromRejection(err); + promise._attachExtraTrace(err); + promise._reject(err); - if (!mismatch) { - return i; - } + if (parent != null) { + parent.cancel(); } +}; - return -1; +function successClear(value) { + clearTimeout(this.handle); + return value; } - -if (Buffer.prototype.lastIndexOf) { - // check Buffer#lastIndexOf is usable: https://github.com/nodejs/node/issues/4604 - if (initBuffer("ABC").lastIndexOf ("ABC") === -1) - Buffer.prototype.lastIndexOf = bufferLastIndexOf; -} else { - Buffer.prototype.lastIndexOf = bufferLastIndexOf; +function failureClear(reason) { + clearTimeout(this.handle); + throw reason; } +Promise.prototype.timeout = function (ms, message) { + ms = +ms; + var ret, parent; -/***/ }), + var handleWrapper = new HandleWrapper(setTimeout(function timeoutTimeout() { + if (ret.isPending()) { + afterTimeout(ret, message, parent); + } + }, ms)); -/***/ 3478: -/***/ ((module) => { + if (debug.cancellation()) { + parent = this.then(); + ret = parent._then(successClear, failureClear, + undefined, handleWrapper, undefined); + ret._setOnCancel(handleWrapper); + } else { + ret = this._then(successClear, failureClear, + undefined, handleWrapper, undefined); + } + + return ret; +}; -module.exports = function initBuffer(val) { - // assume old version - var nodeVersion = process && process.version ? process.version : "v5.0.0"; - var major = nodeVersion.split(".")[0].replace("v", ""); - return major < 6 - ? 
new Buffer(val) - : Buffer.from(val); }; + /***/ }), -/***/ 1590: -/***/ ((module) => { +/***/ 8433: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = Buffers; -function Buffers (bufs) { - if (!(this instanceof Buffers)) return new Buffers(bufs); - this.buffers = bufs || []; - this.length = this.buffers.reduce(function (size, buf) { - return size + buf.length - }, 0); -} +module.exports = function (Promise, apiRejection, tryConvertToPromise, + createContext, INTERNAL, debug) { + var util = __nccwpck_require__(2587); + var TypeError = (__nccwpck_require__(5514).TypeError); + var inherits = (__nccwpck_require__(2587).inherits); + var errorObj = util.errorObj; + var tryCatch = util.tryCatch; + var NULL = {}; -Buffers.prototype.push = function () { - for (var i = 0; i < arguments.length; i++) { - if (!Buffer.isBuffer(arguments[i])) { - throw new TypeError('Tried to push a non-buffer'); - } - } - - for (var i = 0; i < arguments.length; i++) { - var buf = arguments[i]; - this.buffers.push(buf); - this.length += buf.length; + function thrower(e) { + setTimeout(function(){throw e;}, 0); } - return this.length; -}; -Buffers.prototype.unshift = function () { - for (var i = 0; i < arguments.length; i++) { - if (!Buffer.isBuffer(arguments[i])) { - throw new TypeError('Tried to unshift a non-buffer'); + function castPreservingDisposable(thenable) { + var maybePromise = tryConvertToPromise(thenable); + if (maybePromise !== thenable && + typeof thenable._isDisposable === "function" && + typeof thenable._getDisposer === "function" && + thenable._isDisposable()) { + maybePromise._setDisposable(thenable._getDisposer()); } + return maybePromise; } - - for (var i = 0; i < arguments.length; i++) { - var buf = arguments[i]; - this.buffers.unshift(buf); - this.length += buf.length; - } - return this.length; -}; - -Buffers.prototype.copy = function (dst, dStart, start, end) { - return this.slice(start, end).copy(dst, dStart, 0, end - start); -}; - -Buffers.prototype.splice = function (i, howMany) { - var buffers = this.buffers; - var index = i >= 0 ? 
i : this.length - i; - var reps = [].slice.call(arguments, 2); - - if (howMany === undefined) { - howMany = this.length - index; - } - else if (howMany > this.length - index) { - howMany = this.length - index; - } - - for (var i = 0; i < reps.length; i++) { - this.length += reps[i].length; - } - - var removed = new Buffers(); - var bytes = 0; - - var startBytes = 0; - for ( - var ii = 0; - ii < buffers.length && startBytes + buffers[ii].length < index; - ii ++ - ) { startBytes += buffers[ii].length } - - if (index - startBytes > 0) { - var start = index - startBytes; - - if (start + howMany < buffers[ii].length) { - removed.push(buffers[ii].slice(start, start + howMany)); - - var orig = buffers[ii]; - //var buf = new Buffer(orig.length - howMany); - var buf0 = new Buffer(start); - for (var i = 0; i < start; i++) { - buf0[i] = orig[i]; - } - - var buf1 = new Buffer(orig.length - start - howMany); - for (var i = start + howMany; i < orig.length; i++) { - buf1[ i - howMany - start ] = orig[i] - } - - if (reps.length > 0) { - var reps_ = reps.slice(); - reps_.unshift(buf0); - reps_.push(buf1); - buffers.splice.apply(buffers, [ ii, 1 ].concat(reps_)); - ii += reps_.length; - reps = []; - } - else { - buffers.splice(ii, 1, buf0, buf1); - //buffers[ii] = buf; - ii += 2; + function dispose(resources, inspection) { + var i = 0; + var len = resources.length; + var ret = new Promise(INTERNAL); + function iterator() { + if (i >= len) return ret._fulfill(); + var maybePromise = castPreservingDisposable(resources[i++]); + if (maybePromise instanceof Promise && + maybePromise._isDisposable()) { + try { + maybePromise = tryConvertToPromise( + maybePromise._getDisposer().tryDispose(inspection), + resources.promise); + } catch (e) { + return thrower(e); + } + if (maybePromise instanceof Promise) { + return maybePromise._then(iterator, thrower, + null, null, null); + } } + iterator(); } - else { - removed.push(buffers[ii].slice(start)); - buffers[ii] = buffers[ii].slice(0, start); - ii ++; - } + iterator(); + return ret; } - - if (reps.length > 0) { - buffers.splice.apply(buffers, [ ii, 0 ].concat(reps)); - ii += reps.length; + + function Disposer(data, promise, context) { + this._data = data; + this._promise = promise; + this._context = context; } - - while (removed.length < howMany) { - var buf = buffers[ii]; - var len = buf.length; - var take = Math.min(len, howMany - removed.length); - - if (take === len) { - removed.push(buf); - buffers.splice(ii, 1); + + Disposer.prototype.data = function () { + return this._data; + }; + + Disposer.prototype.promise = function () { + return this._promise; + }; + + Disposer.prototype.resource = function () { + if (this.promise().isFulfilled()) { + return this.promise().value(); } - else { - removed.push(buf.slice(0, take)); - buffers[ii] = buffers[ii].slice(take); + return NULL; + }; + + Disposer.prototype.tryDispose = function(inspection) { + var resource = this.resource(); + var context = this._context; + if (context !== undefined) context._pushContext(); + var ret = resource !== NULL + ? 
this.doDispose(resource, inspection) : null; + if (context !== undefined) context._popContext(); + this._promise._unsetDisposable(); + this._data = null; + return ret; + }; + + Disposer.isDisposer = function (d) { + return (d != null && + typeof d.resource === "function" && + typeof d.tryDispose === "function"); + }; + + function FunctionDisposer(fn, promise, context) { + this.constructor$(fn, promise, context); + } + inherits(FunctionDisposer, Disposer); + + FunctionDisposer.prototype.doDispose = function (resource, inspection) { + var fn = this.data(); + return fn.call(resource, resource, inspection); + }; + + function maybeUnwrapDisposer(value) { + if (Disposer.isDisposer(value)) { + this.resources[this.index]._setDisposable(value); + return value.promise(); } + return value; } - - this.length -= removed.length; - - return removed; -}; - -Buffers.prototype.slice = function (i, j) { - var buffers = this.buffers; - if (j === undefined) j = this.length; - if (i === undefined) i = 0; - - if (j > this.length) j = this.length; - - var startBytes = 0; - for ( - var si = 0; - si < buffers.length && startBytes + buffers[si].length <= i; - si ++ - ) { startBytes += buffers[si].length } - - var target = new Buffer(j - i); - - var ti = 0; - for (var ii = si; ti < j - i && ii < buffers.length; ii++) { - var len = buffers[ii].length; - - var start = ti === 0 ? i - startBytes : 0; - var end = ti + len >= j - i - ? Math.min(start + (j - i) - ti, len) - : len - ; - - buffers[ii].copy(target, ti, start, end); - ti += end - start; + + function ResourceList(length) { + this.length = length; + this.promise = null; + this[length-1] = null; } - - return target; -}; -Buffers.prototype.pos = function (i) { - if (i < 0 || i >= this.length) throw new Error('oob'); - var l = i, bi = 0, bu = null; - for (;;) { - bu = this.buffers[bi]; - if (l < bu.length) { - return {buf: bi, offset: l}; + ResourceList.prototype._resultCancelled = function() { + var len = this.length; + for (var i = 0; i < len; ++i) { + var item = this[i]; + if (item instanceof Promise) { + item.cancel(); + } + } + }; + + Promise.using = function () { + var len = arguments.length; + if (len < 2) return apiRejection( + "you must pass at least 2 arguments to Promise.using"); + var fn = arguments[len - 1]; + if (typeof fn !== "function") { + return apiRejection("expecting a function but got " + util.classString(fn)); + } + var input; + var spreadArgs = true; + if (len === 2 && Array.isArray(arguments[0])) { + input = arguments[0]; + len = input.length; + spreadArgs = false; } else { - l -= bu.length; + input = arguments; + len--; + } + var resources = new ResourceList(len); + for (var i = 0; i < len; ++i) { + var resource = input[i]; + if (Disposer.isDisposer(resource)) { + var disposer = resource; + resource = resource.promise(); + resource._setDisposable(disposer); + } else { + var maybePromise = tryConvertToPromise(resource); + if (maybePromise instanceof Promise) { + resource = + maybePromise._then(maybeUnwrapDisposer, null, null, { + resources: resources, + index: i + }, undefined); + } + } + resources[i] = resource; } - bi++; - } -}; - -Buffers.prototype.get = function get (i) { - var pos = this.pos(i); - - return this.buffers[pos.buf].get(pos.offset); -}; - -Buffers.prototype.set = function set (i, b) { - var pos = this.pos(i); - - return this.buffers[pos.buf].set(pos.offset, b); -}; -Buffers.prototype.indexOf = function (needle, offset) { - if ("string" === typeof needle) { - needle = new Buffer(needle); - } else if (needle instanceof Buffer) 
{ - // already a buffer - } else { - throw new Error('Invalid type for a search string'); - } + var reflectedResources = new Array(resources.length); + for (var i = 0; i < reflectedResources.length; ++i) { + reflectedResources[i] = Promise.resolve(resources[i]).reflect(); + } - if (!needle.length) { - return 0; - } + var resultPromise = Promise.all(reflectedResources) + .then(function(inspections) { + for (var i = 0; i < inspections.length; ++i) { + var inspection = inspections[i]; + if (inspection.isRejected()) { + errorObj.e = inspection.error(); + return errorObj; + } else if (!inspection.isFulfilled()) { + resultPromise.cancel(); + return; + } + inspections[i] = inspection.value(); + } + promise._pushContext(); - if (!this.length) { - return -1; - } + fn = tryCatch(fn); + var ret = spreadArgs + ? fn.apply(undefined, inspections) : fn(inspections); + var promiseCreated = promise._popContext(); + debug.checkForgottenReturns( + ret, promiseCreated, "Promise.using", promise); + return ret; + }); - var i = 0, j = 0, match = 0, mstart, pos = 0; + var promise = resultPromise.lastly(function() { + var inspection = new Promise.PromiseInspection(resultPromise); + return dispose(resources, inspection); + }); + resources.promise = promise; + promise._setOnCancel(resources); + return promise; + }; - // start search from a particular point in the virtual buffer - if (offset) { - var p = this.pos(offset); - i = p.buf; - j = p.offset; - pos = offset; - } + Promise.prototype._setDisposable = function (disposer) { + this._bitField = this._bitField | 131072; + this._disposer = disposer; + }; - // for each character in virtual buffer - for (;;) { - while (j >= this.buffers[i].length) { - j = 0; - i++; + Promise.prototype._isDisposable = function () { + return (this._bitField & 131072) > 0; + }; - if (i >= this.buffers.length) { - // search string not found - return -1; - } - } + Promise.prototype._getDisposer = function () { + return this._disposer; + }; - var char = this.buffers[i][j]; + Promise.prototype._unsetDisposable = function () { + this._bitField = this._bitField & (~131072); + this._disposer = undefined; + }; - if (char == needle[match]) { - // keep track where match started - if (match == 0) { - mstart = { - i: i, - j: j, - pos: pos - }; - } - match++; - if (match == needle.length) { - // full match - return mstart.pos; - } - } else if (match != 0) { - // a partial match ended, go back to match starting position - // this will continue the search at the next character - i = mstart.i; - j = mstart.j; - pos = mstart.pos; - match = 0; + Promise.prototype.disposer = function (fn) { + if (typeof fn === "function") { + return new FunctionDisposer(fn, this, createContext()); } + throw new TypeError(); + }; - j++; - pos++; - } }; -Buffers.prototype.toBuffer = function() { - return this.slice(); -} - -Buffers.prototype.toString = function(encoding, start, end) { - return this.slice(start, end).toString(encoding); -} - /***/ }), -/***/ 6533: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2587: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { -var Traverse = __nccwpck_require__(8588); -var EventEmitter = (__nccwpck_require__(2361).EventEmitter); -module.exports = Chainsaw; -function Chainsaw (builder) { - var saw = Chainsaw.saw(builder, {}); - var r = builder.call(saw.handlers, saw); - if (r !== undefined) saw.handlers = r; - saw.record(); - return saw.chain(); -}; +var es5 = __nccwpck_require__(9451); +var canEvaluate = typeof navigator == 
"undefined"; -Chainsaw.light = function ChainsawLight (builder) { - var saw = Chainsaw.saw(builder, {}); - var r = builder.call(saw.handlers, saw); - if (r !== undefined) saw.handlers = r; - return saw.chain(); -}; +var errorObj = {e: {}}; +var tryCatchTarget; +var globalObject = typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? global : + this !== undefined ? this : null; -Chainsaw.saw = function (builder, handlers) { - var saw = new EventEmitter; - saw.handlers = handlers; - saw.actions = []; +function tryCatcher() { + try { + var target = tryCatchTarget; + tryCatchTarget = null; + return target.apply(this, arguments); + } catch (e) { + errorObj.e = e; + return errorObj; + } +} +function tryCatch(fn) { + tryCatchTarget = fn; + return tryCatcher; +} - saw.chain = function () { - var ch = Traverse(saw.handlers).map(function (node) { - if (this.isRoot) return node; - var ps = this.path; +var inherits = function(Child, Parent) { + var hasProp = {}.hasOwnProperty; - if (typeof node === 'function') { - this.update(function () { - saw.actions.push({ - path : ps, - args : [].slice.call(arguments) - }); - return ch; - }); + function T() { + this.constructor = Child; + this.constructor$ = Parent; + for (var propertyName in Parent.prototype) { + if (hasProp.call(Parent.prototype, propertyName) && + propertyName.charAt(propertyName.length-1) !== "$" + ) { + this[propertyName + "$"] = Parent.prototype[propertyName]; } - }); - - process.nextTick(function () { - saw.emit('begin'); - saw.next(); - }); - - return ch; - }; - - saw.pop = function () { - return saw.actions.shift(); - }; - - saw.next = function () { - var action = saw.pop(); - - if (!action) { - saw.emit('end'); - } - else if (!action.trap) { - var node = saw.handlers; - action.path.forEach(function (key) { node = node[key] }); - node.apply(saw.handlers, action.args); - } - }; - - saw.nest = function (cb) { - var args = [].slice.call(arguments, 1); - var autonext = true; - - if (typeof cb === 'boolean') { - var autonext = cb; - cb = args.shift(); } + } + T.prototype = Parent.prototype; + Child.prototype = new T(); + return Child.prototype; +}; - var s = Chainsaw.saw(builder, {}); - var r = builder.call(s.handlers, s); - if (r !== undefined) s.handlers = r; +function isPrimitive(val) { + return val == null || val === true || val === false || + typeof val === "string" || typeof val === "number"; - // If we are recording... - if ("undefined" !== typeof saw.step) { - // ... 
our children should, too - s.record(); - } +} - cb.apply(s.chain(), args); - if (autonext !== false) s.on('end', saw.next); - }; +function isObject(value) { + return typeof value === "function" || + typeof value === "object" && value !== null; +} - saw.record = function () { - upgradeChainsaw(saw); - }; +function maybeWrapAsError(maybeError) { + if (!isPrimitive(maybeError)) return maybeError; - ['trap', 'down', 'jump'].forEach(function (method) { - saw[method] = function () { - throw new Error("To use the trap, down and jump features, please "+ - "call record() first to start recording actions."); - }; - }); + return new Error(safeToString(maybeError)); +} - return saw; -}; +function withAppended(target, appendee) { + var len = target.length; + var ret = new Array(len + 1); + var i; + for (i = 0; i < len; ++i) { + ret[i] = target[i]; + } + ret[i] = appendee; + return ret; +} -function upgradeChainsaw(saw) { - saw.step = 0; +function getDataPropertyOrDefault(obj, key, defaultValue) { + if (es5.isES5) { + var desc = Object.getOwnPropertyDescriptor(obj, key); - // override pop - saw.pop = function () { - return saw.actions[saw.step++]; - }; + if (desc != null) { + return desc.get == null && desc.set == null + ? desc.value + : defaultValue; + } + } else { + return {}.hasOwnProperty.call(obj, key) ? obj[key] : undefined; + } +} - saw.trap = function (name, cb) { - var ps = Array.isArray(name) ? name : [name]; - saw.actions.push({ - path : ps, - step : saw.step, - cb : cb, - trap : true - }); +function notEnumerableProp(obj, name, value) { + if (isPrimitive(obj)) return obj; + var descriptor = { + value: value, + configurable: true, + enumerable: false, + writable: true }; + es5.defineProperty(obj, name, descriptor); + return obj; +} - saw.down = function (name) { - var ps = (Array.isArray(name) ? name : [name]).join('/'); - var i = saw.actions.slice(saw.step).map(function (x) { - if (x.trap && x.step <= saw.step) return false; - return x.path.join('/') == ps; - }).indexOf(true); +function thrower(r) { + throw r; +} - if (i >= 0) saw.step += i; - else saw.step = saw.actions.length; +var inheritedDataKeys = (function() { + var excludedPrototypes = [ + Array.prototype, + Object.prototype, + Function.prototype + ]; - var act = saw.actions[saw.step - 1]; - if (act && act.trap) { - // It's a trap! 
- saw.step = act.step; - act.cb(); + var isExcludedProto = function(val) { + for (var i = 0; i < excludedPrototypes.length; ++i) { + if (excludedPrototypes[i] === val) { + return true; + } } - else saw.next(); + return false; }; - saw.jump = function (step) { - saw.step = step; - saw.next(); - }; -}; + if (es5.isES5) { + var getKeys = Object.getOwnPropertyNames; + return function(obj) { + var ret = []; + var visitedKeys = Object.create(null); + while (obj != null && !isExcludedProto(obj)) { + var keys; + try { + keys = getKeys(obj); + } catch (e) { + return ret; + } + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + if (visitedKeys[key]) continue; + visitedKeys[key] = true; + var desc = Object.getOwnPropertyDescriptor(obj, key); + if (desc != null && desc.get == null && desc.set == null) { + ret.push(key); + } + } + obj = es5.getPrototypeOf(obj); + } + return ret; + }; + } else { + var hasProp = {}.hasOwnProperty; + return function(obj) { + if (isExcludedProto(obj)) return []; + var ret = []; + /*jshint forin:false */ + enumeration: for (var key in obj) { + if (hasProp.call(obj, key)) { + ret.push(key); + } else { + for (var i = 0; i < excludedPrototypes.length; ++i) { + if (hasProp.call(excludedPrototypes[i], key)) { + continue enumeration; + } + } + ret.push(key); + } + } + return ret; + }; + } -/***/ }), +})(); -/***/ 9051: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var thisAssignmentPattern = /this\s*\.\s*\S+\s*=/; +function isClass(fn) { + try { + if (typeof fn === "function") { + var keys = es5.names(fn.prototype); + var hasMethods = es5.isES5 && keys.length > 1; + var hasMethodsOtherThanConstructor = keys.length > 0 && + !(keys.length === 1 && keys[0] === "constructor"); + var hasThisAssignmentAndStaticMethods = + thisAssignmentPattern.test(fn + "") && es5.names(fn).length > 0; -const fs = __nccwpck_require__(7147) -const path = __nccwpck_require__(1017) + if (hasMethods || hasMethodsOtherThanConstructor || + hasThisAssignmentAndStaticMethods) { + return true; + } + } + return false; + } catch (e) { + return false; + } +} -/* istanbul ignore next */ -const LCHOWN = fs.lchown ? 'lchown' : 'chown' -/* istanbul ignore next */ -const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync' +function toFastProperties(obj) { + /*jshint -W027,-W055,-W031*/ + function FakeConstructor() {} + FakeConstructor.prototype = obj; + var l = 8; + while (l--) new FakeConstructor(); + return obj; + eval(obj); +} -/* istanbul ignore next */ -const needEISDIRHandled = fs.lchown && - !process.version.match(/v1[1-9]+\./) && - !process.version.match(/v10\.[6-9]/) +var rident = /^[a-z$_][a-z$_0-9]*$/i; +function isIdentifier(str) { + return rident.test(str); +} -const lchownSync = (path, uid, gid) => { - try { - return fs[LCHOWNSYNC](path, uid, gid) - } catch (er) { - if (er.code !== 'ENOENT') - throw er - } +function filledRange(count, prefix, suffix) { + var ret = new Array(count); + for(var i = 0; i < count; ++i) { + ret[i] = prefix + i + suffix; + } + return ret; } -/* istanbul ignore next */ -const chownSync = (path, uid, gid) => { - try { - return fs.chownSync(path, uid, gid) - } catch (er) { - if (er.code !== 'ENOENT') - throw er - } +function safeToString(obj) { + try { + return obj + ""; + } catch (e) { + return "[no string representation]"; + } } -/* istanbul ignore next */ -const handleEISDIR = - needEISDIRHandled ? 
(path, uid, gid, cb) => er => { - // Node prior to v10 had a very questionable implementation of - // fs.lchown, which would always try to call fs.open on a directory - // Fall back to fs.chown in those cases. - if (!er || er.code !== 'EISDIR') - cb(er) - else - fs.chown(path, uid, gid, cb) - } - : (_, __, ___, cb) => cb +function isError(obj) { + return obj !== null && + typeof obj === "object" && + typeof obj.message === "string" && + typeof obj.name === "string"; +} -/* istanbul ignore next */ -const handleEISDirSync = - needEISDIRHandled ? (path, uid, gid) => { +function markAsOriginatingFromRejection(e) { try { - return lchownSync(path, uid, gid) - } catch (er) { - if (er.code !== 'EISDIR') - throw er - chownSync(path, uid, gid) + notEnumerableProp(e, "isOperational", true); } - } - : (path, uid, gid) => lchownSync(path, uid, gid) + catch(ignore) {} +} -// fs.readdir could only accept an options object as of node v6 -const nodeVersion = process.version -let readdir = (path, options, cb) => fs.readdir(path, options, cb) -let readdirSync = (path, options) => fs.readdirSync(path, options) -/* istanbul ignore next */ -if (/^v4\./.test(nodeVersion)) - readdir = (path, options, cb) => fs.readdir(path, cb) +function originatesFromRejection(e) { + if (e == null) return false; + return ((e instanceof Error["__BluebirdErrorTypes__"].OperationalError) || + e["isOperational"] === true); +} -const chown = (cpath, uid, gid, cb) => { - fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => { - // Skip ENOENT error - cb(er && er.code !== 'ENOENT' ? er : null) - })) +function canAttachTrace(obj) { + return isError(obj) && es5.propertyIsWritable(obj, "stack"); } -const chownrKid = (p, child, uid, gid, cb) => { - if (typeof child === 'string') - return fs.lstat(path.resolve(p, child), (er, stats) => { - // Skip ENOENT error - if (er) - return cb(er.code !== 'ENOENT' ? er : null) - stats.name = child - chownrKid(p, stats, uid, gid, cb) - }) +var ensureErrorObject = (function() { + if (!("stack" in new Error())) { + return function(value) { + if (canAttachTrace(value)) return value; + try {throw new Error(safeToString(value));} + catch(err) {return err;} + }; + } else { + return function(value) { + if (canAttachTrace(value)) return value; + return new Error(safeToString(value)); + }; + } +})(); - if (child.isDirectory()) { - chownr(path.resolve(p, child.name), uid, gid, er => { - if (er) - return cb(er) - const cpath = path.resolve(p, child.name) - chown(cpath, uid, gid, cb) - }) - } else { - const cpath = path.resolve(p, child.name) - chown(cpath, uid, gid, cb) - } +function classString(obj) { + return {}.toString.call(obj); } - -const chownr = (p, uid, gid, cb) => { - readdir(p, { withFileTypes: true }, (er, children) => { - // any error other than ENOTDIR or ENOTSUP means it's not readable, - // or doesn't exist. give up. 
- if (er) { - if (er.code === 'ENOENT') - return cb() - else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') - return cb(er) +function copyDescriptors(from, to, filter) { + var keys = es5.names(from); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + if (filter(key)) { + try { + es5.defineProperty(to, key, es5.getDescriptor(from, key)); + } catch (ignore) {} + } } - if (er || !children.length) - return chown(p, uid, gid, cb) +} - let len = children.length - let errState = null - const then = er => { - if (errState) - return - if (er) - return cb(errState = er) - if (-- len === 0) - return chown(p, uid, gid, cb) +var asArray = function(v) { + if (es5.isArray(v)) { + return v; } + return null; +}; - children.forEach(child => chownrKid(p, child, uid, gid, then)) - }) +if (typeof Symbol !== "undefined" && Symbol.iterator) { + var ArrayFrom = typeof Array.from === "function" ? function(v) { + return Array.from(v); + } : function(v) { + var ret = []; + var it = v[Symbol.iterator](); + var itResult; + while (!((itResult = it.next()).done)) { + ret.push(itResult.value); + } + return ret; + }; + + asArray = function(v) { + if (es5.isArray(v)) { + return v; + } else if (v != null && typeof v[Symbol.iterator] === "function") { + return ArrayFrom(v); + } + return null; + }; } -const chownrKidSync = (p, child, uid, gid) => { - if (typeof child === 'string') { - try { - const stats = fs.lstatSync(path.resolve(p, child)) - stats.name = child - child = stats - } catch (er) { - if (er.code === 'ENOENT') - return - else - throw er - } - } +var isNode = typeof process !== "undefined" && + classString(process).toLowerCase() === "[object process]"; - if (child.isDirectory()) - chownrSync(path.resolve(p, child.name), uid, gid) +var hasEnvVariables = typeof process !== "undefined" && + typeof process.env !== "undefined"; - handleEISDirSync(path.resolve(p, child.name), uid, gid) +function env(key) { + return hasEnvVariables ? 
process.env[key] : undefined; } -const chownrSync = (p, uid, gid) => { - let children - try { - children = readdirSync(p, { withFileTypes: true }) - } catch (er) { - if (er.code === 'ENOENT') - return - else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP') - return handleEISDirSync(p, uid, gid) - else - throw er - } - - if (children && children.length) - children.forEach(child => chownrKidSync(p, child, uid, gid)) +function getNativePromise() { + if (typeof Promise === "function") { + try { + var promise = new Promise(function(){}); + if ({}.toString.call(promise) === "[object Promise]") { + return Promise; + } + } catch (e) {} + } +} - return handleEISDirSync(p, uid, gid) +function domainBind(self, cb) { + return self.bind(cb); } -module.exports = chownr -chownr.sync = chownrSync +var ret = { + isClass: isClass, + isIdentifier: isIdentifier, + inheritedDataKeys: inheritedDataKeys, + getDataPropertyOrDefault: getDataPropertyOrDefault, + thrower: thrower, + isArray: es5.isArray, + asArray: asArray, + notEnumerableProp: notEnumerableProp, + isPrimitive: isPrimitive, + isObject: isObject, + isError: isError, + canEvaluate: canEvaluate, + errorObj: errorObj, + tryCatch: tryCatch, + inherits: inherits, + withAppended: withAppended, + maybeWrapAsError: maybeWrapAsError, + toFastProperties: toFastProperties, + filledRange: filledRange, + toString: safeToString, + canAttachTrace: canAttachTrace, + ensureErrorObject: ensureErrorObject, + originatesFromRejection: originatesFromRejection, + markAsOriginatingFromRejection: markAsOriginatingFromRejection, + classString: classString, + copyDescriptors: copyDescriptors, + hasDevTools: typeof chrome !== "undefined" && chrome && + typeof chrome.loadTimes === "function", + isNode: isNode, + hasEnvVariables: hasEnvVariables, + env: env, + global: globalObject, + getNativePromise: getNativePromise, + domainBind: domainBind +}; +ret.isRecentNode = ret.isNode && (function() { + var version = process.versions.node.split(".").map(Number); + return (version[0] === 0 && version[1] > 10) || (version[0] > 0); +})(); + +if (ret.isNode) ret.toFastProperties(process); + +try {throw new Error(); } catch (e) {ret.lastLineError = e;} +module.exports = ret; /***/ }), -/***/ 5443: +/***/ 8425: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var util = __nccwpck_require__(3837); var Stream = (__nccwpck_require__(2781).Stream); -var DelayedStream = __nccwpck_require__(8611); +var DelayedStream = __nccwpck_require__(7383); module.exports = CombinedStream; function CombinedStream() { @@ -18012,7 +13854,7 @@ CombinedStream.prototype._emitError = function(err) { /***/ }), -/***/ 6891: +/***/ 6915: /***/ ((module) => { module.exports = function (xs, fn) { @@ -18032,7 +13874,7 @@ var isArray = Array.isArray || function (xs) { /***/ }), -/***/ 5898: +/***/ 1307: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { // Copyright Joyent, Inc. and other Node contributors. 
@@ -18146,7 +13988,7 @@ function objectToString(o) { /***/ }), -/***/ 8611: +/***/ 7383: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Stream = (__nccwpck_require__(2781).Stream); @@ -18260,7 +14102,7 @@ DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { /***/ }), -/***/ 8932: +/***/ 2547: /***/ ((__unused_webpack_module, exports) => { @@ -18287,12 +14129,12 @@ exports.Deprecation = Deprecation; /***/ }), -/***/ 1932: +/***/ 2268: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var stream = __nccwpck_require__(1642); +var stream = __nccwpck_require__(9025); function DuplexWrapper(options, writable, readable) { if (typeof readable === "undefined") { @@ -18370,7 +14212,7 @@ module.exports.DuplexWrapper = DuplexWrapper; /***/ }), -/***/ 1133: +/***/ 4048: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var debug; @@ -18379,7 +14221,7 @@ module.exports = function () { if (!debug) { try { /* eslint global-require: off */ - debug = __nccwpck_require__(9975)("follow-redirects"); + debug = __nccwpck_require__(1871)("follow-redirects"); } catch (error) { /* */ } if (typeof debug !== "function") { @@ -18392,7 +14234,7 @@ module.exports = function () { /***/ }), -/***/ 7707: +/***/ 2663: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var url = __nccwpck_require__(7310); @@ -18401,7 +14243,31 @@ var http = __nccwpck_require__(3685); var https = __nccwpck_require__(5687); var Writable = (__nccwpck_require__(2781).Writable); var assert = __nccwpck_require__(9491); -var debug = __nccwpck_require__(1133); +var debug = __nccwpck_require__(4048); + +// Whether to use the native URL object or the legacy url module +var useNativeURL = false; +try { + assert(new URL()); +} +catch (error) { + useNativeURL = error.code === "ERR_INVALID_URL"; +} + +// URL fields to preserve in copy operations +var preservedUrlFields = [ + "auth", + "host", + "hostname", + "href", + "path", + "pathname", + "port", + "protocol", + "query", + "search", + "hash", +]; // Create handlers that pass events from native requests var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; @@ -18412,19 +14278,20 @@ events.forEach(function (event) { }; }); +// Error types with codes var InvalidUrlError = createErrorType( "ERR_INVALID_URL", "Invalid URL", TypeError ); -// Error types with codes var RedirectionError = createErrorType( "ERR_FR_REDIRECTION_FAILURE", "Redirected request failed" ); var TooManyRedirectsError = createErrorType( "ERR_FR_TOO_MANY_REDIRECTS", - "Maximum number of redirects exceeded" + "Maximum number of redirects exceeded", + RedirectionError ); var MaxBodyLengthExceededError = createErrorType( "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", @@ -18459,7 +14326,13 @@ function RedirectableRequest(options, responseCallback) { // React to responses of native requests var self = this; this._onNativeResponse = function (response) { - self._processResponse(response); + try { + self._processResponse(response); + } + catch (cause) { + self.emit("error", cause instanceof RedirectionError ? 
+ cause : new RedirectionError({ cause: cause })); + } }; // Perform the first request @@ -18677,8 +14550,7 @@ RedirectableRequest.prototype._performRequest = function () { var protocol = this._options.protocol; var nativeProtocol = this._options.nativeProtocols[protocol]; if (!nativeProtocol) { - this.emit("error", new TypeError("Unsupported protocol " + protocol)); - return; + throw new TypeError("Unsupported protocol " + protocol); } // If specified, use the agent corresponding to the protocol @@ -18777,8 +14649,7 @@ RedirectableRequest.prototype._processResponse = function (response) { // RFC7231§6.4: A client SHOULD detect and intervene // in cyclical redirections (i.e., "infinite" redirection loops). if (++this._redirectCount > this._options.maxRedirects) { - this.emit("error", new TooManyRedirectsError()); - return; + throw new TooManyRedirectsError(); } // Store the request headers if applicable @@ -18812,34 +14683,24 @@ RedirectableRequest.prototype._processResponse = function (response) { var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); // If the redirect is relative, carry over the host of the last request - var currentUrlParts = url.parse(this._currentUrl); + var currentUrlParts = parseUrl(this._currentUrl); var currentHost = currentHostHeader || currentUrlParts.host; var currentUrl = /^\w+:/.test(location) ? this._currentUrl : url.format(Object.assign(currentUrlParts, { host: currentHost })); - // Determine the URL of the redirection - var redirectUrl; - try { - redirectUrl = url.resolve(currentUrl, location); - } - catch (cause) { - this.emit("error", new RedirectionError({ cause: cause })); - return; - } - // Create the redirected request - debug("redirecting to", redirectUrl); + var redirectUrl = resolveUrl(location, currentUrl); + debug("redirecting to", redirectUrl.href); this._isRedirect = true; - var redirectUrlParts = url.parse(redirectUrl); - Object.assign(this._options, redirectUrlParts); + spreadUrlObject(redirectUrl, this._options); // Drop confidential headers when redirecting to a less secure protocol // or to a different domain that is not a superdomain - if (redirectUrlParts.protocol !== currentUrlParts.protocol && - redirectUrlParts.protocol !== "https:" || - redirectUrlParts.host !== currentHost && - !isSubdomain(redirectUrlParts.host, currentHost)) { - removeMatchingHeaders(/^(?:authorization|cookie)$/i, this._options.headers); + if (redirectUrl.protocol !== currentUrlParts.protocol && + redirectUrl.protocol !== "https:" || + redirectUrl.host !== currentHost && + !isSubdomain(redirectUrl.host, currentHost)) { + removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); } // Evaluate the beforeRedirect callback @@ -18853,23 +14714,12 @@ RedirectableRequest.prototype._processResponse = function (response) { method: method, headers: requestHeaders, }; - try { - beforeRedirect(this._options, responseDetails, requestDetails); - } - catch (err) { - this.emit("error", err); - return; - } + beforeRedirect(this._options, responseDetails, requestDetails); this._sanitizeOptions(this._options); } // Perform the redirected request - try { - this._performRequest(); - } - catch (cause) { - this.emit("error", new RedirectionError({ cause: cause })); - } + this._performRequest(); }; // Wraps the key/value object of protocols with redirect functionality @@ -18889,27 +14739,16 @@ function wrap(protocols) { // Executes a request, following redirects function request(input, options, callback) { - // Parse parameters - 
if (isString(input)) { - var parsed; - try { - parsed = urlToOptions(new URL(input)); - } - catch (err) { - /* istanbul ignore next */ - parsed = url.parse(input); - } - if (!isString(parsed.protocol)) { - throw new InvalidUrlError({ input }); - } - input = parsed; + // Parse parameters, ensuring that input is an object + if (isURL(input)) { + input = spreadUrlObject(input); } - else if (URL && (input instanceof URL)) { - input = urlToOptions(input); + else if (isString(input)) { + input = spreadUrlObject(parseUrl(input)); } else { callback = options; - options = input; + options = validateUrl(input); input = { protocol: protocol }; } if (isFunction(options)) { @@ -18948,27 +14787,57 @@ function wrap(protocols) { return exports; } -/* istanbul ignore next */ -function noop() { /* empty */ } +function noop() { /* empty */ } + +function parseUrl(input) { + var parsed; + /* istanbul ignore else */ + if (useNativeURL) { + parsed = new URL(input); + } + else { + // Ensure the URL is valid and absolute + parsed = validateUrl(url.parse(input)); + if (!isString(parsed.protocol)) { + throw new InvalidUrlError({ input }); + } + } + return parsed; +} + +function resolveUrl(relative, base) { + /* istanbul ignore next */ + return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); +} + +function validateUrl(input) { + if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { + throw new InvalidUrlError({ input: input.href || input }); + } + if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { + throw new InvalidUrlError({ input: input.href || input }); + } + return input; +} -// from https://github.com/nodejs/node/blob/master/lib/internal/url.js -function urlToOptions(urlObject) { - var options = { - protocol: urlObject.protocol, - hostname: urlObject.hostname.startsWith("[") ? - /* istanbul ignore next */ - urlObject.hostname.slice(1, -1) : - urlObject.hostname, - hash: urlObject.hash, - search: urlObject.search, - pathname: urlObject.pathname, - path: urlObject.pathname + urlObject.search, - href: urlObject.href, - }; - if (urlObject.port !== "") { - options.port = Number(urlObject.port); +function spreadUrlObject(urlObject, target) { + var spread = target || {}; + for (var key of preservedUrlFields) { + spread[key] = urlObject[key]; + } + + // Fix IPv6 hostname + if (spread.hostname.startsWith("[")) { + spread.hostname = spread.hostname.slice(1, -1); + } + // Ensure port is a number + if (spread.port !== "") { + spread.port = Number(spread.port); } - return options; + // Concatenate path + spread.path = spread.search ? 
spread.pathname + spread.search : spread.pathname; + + return spread; } function removeMatchingHeaders(regex, headers) { @@ -18994,8 +14863,16 @@ function createErrorType(code, message, baseClass) { // Attach constructor and set default properties CustomError.prototype = new (baseClass || Error)(); - CustomError.prototype.constructor = CustomError; - CustomError.prototype.name = "Error [" + code + "]"; + Object.defineProperties(CustomError.prototype, { + constructor: { + value: CustomError, + enumerable: false, + }, + name: { + value: "Error [" + code + "]", + enumerable: false, + }, + }); return CustomError; } @@ -19025,6 +14902,10 @@ function isBuffer(value) { return typeof value === "object" && ("length" in value); } +function isURL(value) { + return URL && value instanceof URL; +} + // Exports module.exports = wrap({ http: http, https: https }); module.exports.wrap = wrap; @@ -19032,10 +14913,10 @@ module.exports.wrap = wrap; /***/ }), -/***/ 4334: +/***/ 9919: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var CombinedStream = __nccwpck_require__(5443); +var CombinedStream = __nccwpck_require__(8425); var util = __nccwpck_require__(3837); var path = __nccwpck_require__(1017); var http = __nccwpck_require__(3685); @@ -19043,9 +14924,9 @@ var https = __nccwpck_require__(5687); var parseUrl = (__nccwpck_require__(7310).parse); var fs = __nccwpck_require__(7147); var Stream = (__nccwpck_require__(2781).Stream); -var mime = __nccwpck_require__(3583); -var asynckit = __nccwpck_require__(4812); -var populate = __nccwpck_require__(7142); +var mime = __nccwpck_require__(9324); +var asynckit = __nccwpck_require__(9930); +var populate = __nccwpck_require__(8413); // Public API module.exports = FormData; @@ -19540,7 +15421,7 @@ FormData.prototype.toString = function () { /***/ }), -/***/ 7142: +/***/ 8413: /***/ ((module) => { // populates missing values @@ -19557,5166 +15438,5622 @@ module.exports = function(dst, src) { /***/ }), -/***/ 7714: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5717: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch -const MiniPass = __nccwpck_require__(2505) -const EE = (__nccwpck_require__(2361).EventEmitter) -const fs = __nccwpck_require__(7147) - -let writev = fs.writev -/* istanbul ignore next */ -if (!writev) { - // This entire block can be removed if support for earlier than Node.js - // 12.9.0 is not needed. 
- const binding = process.binding('fs') - const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback - - writev = (fd, iovec, pos, cb) => { - const done = (er, bw) => cb(er, bw, iovec) - const req = new FSReqWrap() - req.oncomplete = done - binding.writeBuffers(fd, iovec, pos, req) - } -} - -const _autoClose = Symbol('_autoClose') -const _close = Symbol('_close') -const _ended = Symbol('_ended') -const _fd = Symbol('_fd') -const _finished = Symbol('_finished') -const _flags = Symbol('_flags') -const _flush = Symbol('_flush') -const _handleChunk = Symbol('_handleChunk') -const _makeBuf = Symbol('_makeBuf') -const _mode = Symbol('_mode') -const _needDrain = Symbol('_needDrain') -const _onerror = Symbol('_onerror') -const _onopen = Symbol('_onopen') -const _onread = Symbol('_onread') -const _onwrite = Symbol('_onwrite') -const _open = Symbol('_open') -const _path = Symbol('_path') -const _pos = Symbol('_pos') -const _queue = Symbol('_queue') -const _read = Symbol('_read') -const _readSize = Symbol('_readSize') -const _reading = Symbol('_reading') -const _remain = Symbol('_remain') -const _size = Symbol('_size') -const _write = Symbol('_write') -const _writing = Symbol('_writing') -const _defaultFlag = Symbol('_defaultFlag') -const _errored = Symbol('_errored') - -class ReadStream extends MiniPass { - constructor (path, opt) { - opt = opt || {} - super(opt) - - this.readable = true - this.writable = false - - if (typeof path !== 'string') - throw new TypeError('path must be a string') - - this[_errored] = false - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_path] = path - this[_readSize] = opt.readSize || 16*1024*1024 - this[_reading] = false - this[_size] = typeof opt.size === 'number' ? opt.size : Infinity - this[_remain] = this[_size] - this[_autoClose] = typeof opt.autoClose === 'boolean' ? 
- opt.autoClose : true - - if (typeof this[_fd] === 'number') - this[_read]() - else - this[_open]() - } +var fs = __nccwpck_require__(7147) +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync - get fd () { return this[_fd] } - get path () { return this[_path] } +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = __nccwpck_require__(5313) + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} - write () { - throw new TypeError('this is a readable stream') +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) } - end () { - throw new TypeError('this is a readable stream') + if (typeof cache === 'function') { + cb = cache + cache = null } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} - [_open] () { - fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) } - [_onopen] (er, fd) { - if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_read]() + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er } } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} + + +/***/ }), + +/***/ 5313: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var pathModule = __nccwpck_require__(1017); +var isWindows = process.platform === 'win32'; +var fs = __nccwpck_require__(7147); + +// JavaScript implementation of realpath, ported from node pre-v6 - [_makeBuf] () { - return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. 
+ var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } } - [_read] () { - if (!this[_reading]) { - this[_reading] = true - const buf = this[_makeBuf]() - /* istanbul ignore if */ - if (buf.length === 0) - return process.nextTick(() => this[_onread](null, 0, buf)) - fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) => - this[_onread](er, br, buf)) + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } } } +} - [_onread] (er, br, buf) { - this[_reading] = false - if (er) - this[_onerror](er) - else if (this[_handleChunk](br, buf)) - this[_read]() +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; } - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; } } - [_onerror] (er) { - this[_reading] = true - this[_close]() - this.emit('error', er) - } + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } - [_handleChunk] (br, buf) { - let ret = false - // no effect if infinite - this[_remain] -= br - if (br > 0) - ret = super.write(br < buf.length ? buf.slice(0, br) : buf) + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. 
no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } - if (br === 0 || this[_remain] <= 0) { - ret = false - this[_close]() - super.end() + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. + if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; } - return ret + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); } - emit (ev, data) { - switch (ev) { - case 'prefinish': - case 'finish': - break + if (cache) cache[original] = p; - case 'drain': - if (typeof this[_fd] === 'number') - this[_read]() - break + return p; +}; - case 'error': - if (this[_errored]) - return - this[_errored] = true - return super.emit(ev, data) - default: - return super.emit(ev, data) +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); } } -} -class ReadStreamSync extends ReadStream { - [_open] () { - let threw = true - try { - this[_onopen](null, fs.openSync(this[_path], 'r')) - threw = false - } finally { - if (threw) - this[_close]() + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. 
+ return gotResolvedLink(cache[base]); } + + return fs.lstat(base, gotStat); } - [_read] () { - let threw = true - try { - if (!this[_reading]) { - this[_reading] = true - do { - const buf = this[_makeBuf]() - /* istanbul ignore next */ - const br = buf.length === 0 ? 0 - : fs.readSync(this[_fd], buf, 0, buf.length, null) - if (!this[_handleChunk](br, buf)) - break - } while (true) - this[_reading] = false + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); } - threw = false - } finally { - if (threw) - this[_close]() } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); } - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; + + +/***/ }), + +/***/ 6071: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +var __webpack_unused_export__; +/* unused reexport */ __nccwpck_require__(3652) +/* unused reexport */ __nccwpck_require__(4116) +exports.Writer = __nccwpck_require__(989) + +exports.$B = { + Reader: __nccwpck_require__(8331), + Writer: __nccwpck_require__(3009) +} + +exports.Lv = { + Reader: __nccwpck_require__(2983), + Writer: __nccwpck_require__(178) +} + +exports.rU = { + Reader: __nccwpck_require__(3773), + Writer: __nccwpck_require__(147) +} + +exports._S = { + Reader: __nccwpck_require__(7620), + Writer: __nccwpck_require__(4943) +} + +__webpack_unused_export__ = __webpack_unused_export__ = exports.Lv.Reader +__webpack_unused_export__ = __webpack_unused_export__ = exports.$B.Reader +__webpack_unused_export__ = __webpack_unused_export__ = exports.rU.Reader +__webpack_unused_export__ = __webpack_unused_export__ = exports._S.Reader + +exports.Writer.Dir = __webpack_unused_export__ = exports.Lv.Writer +exports.Writer.File = __webpack_unused_export__ = exports.$B.Writer +exports.Writer.Link = __webpack_unused_export__ = exports.rU.Writer +exports.Writer.Proxy = __webpack_unused_export__ = exports._S.Writer + +/* unused reexport */ __nccwpck_require__(327) + + +/***/ }), + +/***/ 3652: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// the parent class for all fstreams. 
+ +module.exports = Abstract + +var Stream = (__nccwpck_require__(2781).Stream) +var inherits = __nccwpck_require__(445) + +function Abstract () { + Stream.call(this) +} + +inherits(Abstract, Stream) + +Abstract.prototype.on = function (ev, fn) { + if (ev === 'ready' && this.ready) { + process.nextTick(fn.bind(this)) + } else { + Stream.prototype.on.call(this, ev, fn) + } + return this +} + +Abstract.prototype.abort = function () { + this._aborted = true + this.emit('abort') +} + +Abstract.prototype.destroy = function () {} + +Abstract.prototype.warn = function (msg, code) { + var self = this + var er = decorate(msg, code, self) + if (!self.listeners('warn')) { + console.error('%s %s\n' + + 'path = %s\n' + + 'syscall = %s\n' + + 'fstream_type = %s\n' + + 'fstream_path = %s\n' + + 'fstream_unc_path = %s\n' + + 'fstream_class = %s\n' + + 'fstream_stack =\n%s\n', + code || 'UNKNOWN', + er.stack, + er.path, + er.syscall, + er.fstream_type, + er.fstream_path, + er.fstream_unc_path, + er.fstream_class, + er.fstream_stack.join('\n')) + } else { + self.emit('warn', er) } } -class WriteStream extends EE { - constructor (path, opt) { - opt = opt || {} - super(opt) - this.readable = false - this.writable = true - this[_errored] = false - this[_writing] = false - this[_ended] = false - this[_needDrain] = false - this[_queue] = [] - this[_path] = path - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_mode] = opt.mode === undefined ? 0o666 : opt.mode - this[_pos] = typeof opt.start === 'number' ? opt.start : null - this[_autoClose] = typeof opt.autoClose === 'boolean' ? - opt.autoClose : true +Abstract.prototype.info = function (msg, code) { + this.emit('info', msg, code) +} - // truncating makes no sense when writing into the middle - const defaultFlag = this[_pos] !== null ? 'r+' : 'w' - this[_defaultFlag] = opt.flags === undefined - this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags +Abstract.prototype.error = function (msg, code, th) { + var er = decorate(msg, code, this) + if (th) throw er + else this.emit('error', er) +} - if (this[_fd] === null) - this[_open]() +function decorate (er, code, self) { + if (!(er instanceof Error)) er = new Error(er) + er.code = er.code || code + er.path = er.path || self.path + er.fstream_type = er.fstream_type || self.type + er.fstream_path = er.fstream_path || self.path + if (self._path !== self.path) { + er.fstream_unc_path = er.fstream_unc_path || self._path + } + if (self.linkpath) { + er.fstream_linkpath = er.fstream_linkpath || self.linkpath } + er.fstream_class = er.fstream_class || self.constructor.name + er.fstream_stack = er.fstream_stack || + new Error().stack.split(/\n/).slice(3).map(function (s) { + return s.replace(/^ {4}at /, '') + }) - emit (ev, data) { - if (ev === 'error') { - if (this[_errored]) - return - this[_errored] = true - } - return super.emit(ev, data) + return er +} + + +/***/ }), + +/***/ 327: +/***/ ((module) => { + +module.exports = collect + +function collect (stream) { + if (stream._collected) return + + if (stream._paused) return stream.on('resume', collect.bind(null, stream)) + + stream._collected = true + stream.pause() + + stream.on('data', save) + stream.on('end', save) + var buf = [] + function save (b) { + if (typeof b === 'string') b = new Buffer(b) + if (Buffer.isBuffer(b) && !b.length) return + buf.push(b) } - - get fd () { return this[_fd] } - get path () { return this[_path] } - - [_onerror] (er) { - this[_close]() - this[_writing] = true - this.emit('error', er) + stream.on('entry', saveEntry) + var entryBuffer = [] + function saveEntry (e) { + collect(e) + entryBuffer.push(e) } - [_open] () { - fs.open(this[_path], this[_flags], this[_mode], - (er, fd) => this[_onopen](er, fd)) + stream.on('proxy', proxyPause) + function proxyPause (p) { + p.pause() } - [_onopen] (er, fd) { - if (this[_defaultFlag] && - this[_flags] === 'r+' && - er && er.code === 'ENOENT') { - this[_flags] = 'w' - this[_open]() - } else if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_flush]() - } - } + // replace the pipe method with a new version that will + // unlock the buffered stuff. if you just call .pipe() + // without a destination, then it'll re-play the events. + stream.pipe = (function (orig) { + return function (dest) { + // console.error(' === open the pipes', dest && dest.path) - end (buf, enc) { - if (buf) - this.write(buf, enc) + // let the entries flow through one at a time. + // Once they're all done, then we can resume completely. 
+ var e = 0 + ;(function unblockEntry () { + var entry = entryBuffer[e++] + // console.error(" ==== unblock entry", entry && entry.path) + if (!entry) return resume() + entry.on('end', unblockEntry) + if (dest) dest.add(entry) + else stream.emit('entry', entry) + })() - this[_ended] = true + function resume () { + stream.removeListener('entry', saveEntry) + stream.removeListener('data', save) + stream.removeListener('end', save) - // synthetic after-write logic, where drain/finish live - if (!this[_writing] && !this[_queue].length && - typeof this[_fd] === 'number') - this[_onwrite](null, 0) - return this - } + stream.pipe = orig + if (dest) stream.pipe(dest) - write (buf, enc) { - if (typeof buf === 'string') - buf = Buffer.from(buf, enc) + buf.forEach(function (b) { + if (b) stream.emit('data', b) + else stream.emit('end') + }) - if (this[_ended]) { - this.emit('error', new Error('write() after end()')) - return false - } + stream.resume() + } - if (this[_fd] === null || this[_writing] || this[_queue].length) { - this[_queue].push(buf) - this[_needDrain] = true - return false + return dest } + })(stream.pipe) +} - this[_writing] = true - this[_write](buf) - return true - } - [_write] (buf) { - fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => - this[_onwrite](er, bw)) - } +/***/ }), - [_onwrite] (er, bw) { - if (er) - this[_onerror](er) - else { - if (this[_pos] !== null) - this[_pos] += bw - if (this[_queue].length) - this[_flush]() - else { - this[_writing] = false +/***/ 2983: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (this[_ended] && !this[_finished]) { - this[_finished] = true - this[_close]() - this.emit('finish') - } else if (this[_needDrain]) { - this[_needDrain] = false - this.emit('drain') - } - } - } - } +// A thing that emits "entry" events with Reader objects +// Pausing it causes it to stop emitting entry events, and also +// pauses the current entry if there is one. - [_flush] () { - if (this[_queue].length === 0) { - if (this[_ended]) - this[_onwrite](null, 0) - } else if (this[_queue].length === 1) - this[_write](this[_queue].pop()) - else { - const iovec = this[_queue] - this[_queue] = [] - writev(this[_fd], iovec, this[_pos], - (er, bw) => this[_onwrite](er, bw)) - } - } +module.exports = DirReader - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) - } - } -} +var fs = __nccwpck_require__(3273) +var inherits = __nccwpck_require__(445) +var path = __nccwpck_require__(1017) +var Reader = __nccwpck_require__(4116) +var assert = (__nccwpck_require__(9491).ok) -class WriteStreamSync extends WriteStream { - [_open] () { - let fd - // only wrap in a try{} block if we know we'll retry, to avoid - // the rethrow obscuring the error's source frame in most cases. 
- if (this[_defaultFlag] && this[_flags] === 'r+') { - try { - fd = fs.openSync(this[_path], this[_flags], this[_mode]) - } catch (er) { - if (er.code === 'ENOENT') { - this[_flags] = 'w' - return this[_open]() - } else - throw er - } - } else - fd = fs.openSync(this[_path], this[_flags], this[_mode]) +inherits(DirReader, Reader) - this[_onopen](null, fd) +function DirReader (props) { + var self = this + if (!(self instanceof DirReader)) { + throw new Error('DirReader must be called as constructor.') } - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } + // should already be established as a Directory type + if (props.type !== 'Directory' || !props.Directory) { + throw new Error('Non-directory type ' + props.type) } - [_write] (buf) { - // throw the original, but try to close if it fails - let threw = true - try { - this[_onwrite](null, - fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) - threw = false - } finally { - if (threw) - try { this[_close]() } catch (_) {} - } + self.entries = null + self._index = -1 + self._paused = false + self._length = -1 + + if (props.sort) { + this.sort = props.sort } -} -exports.ReadStream = ReadStream -exports.ReadStreamSync = ReadStreamSync + Reader.call(this, props) +} -exports.WriteStream = WriteStream -exports.WriteStreamSync = WriteStreamSync +DirReader.prototype._getEntries = function () { + var self = this + // race condition. might pause() before calling _getEntries, + // and then resume, and try to get them a second time. + if (self._gotEntries) return + self._gotEntries = true -/***/ }), + fs.readdir(self._path, function (er, entries) { + if (er) return self.error(er) -/***/ 2505: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + self.entries = entries + self.emit('entries', entries) + if (self._paused) self.once('resume', processEntries) + else processEntries() -const proc = typeof process === 'object' && process ? 
process : { - stdout: null, - stderr: null, + function processEntries () { + self._length = self.entries.length + if (typeof self.sort === 'function') { + self.entries = self.entries.sort(self.sort.bind(self)) + } + self._read() + } + }) } -const EE = __nccwpck_require__(2361) -const Stream = __nccwpck_require__(2781) -const SD = (__nccwpck_require__(1576).StringDecoder) - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' - -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) -class Pipe { - constructor (src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe () { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors () {} - end () { - this.unpipe() - if (this.opts.end) - this.dest.end() - } -} +// start walking the dir, and emit an "entry" event for each one. +DirReader.prototype._read = function () { + var self = this -class PipeProxyErrors extends Pipe { - unpipe () { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor (src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} + if (!self.entries) return self._getEntries() -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = [] - this.buffer = [] - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[ASYNC] = options && !!options.async || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false + if (self._paused || self._currentEntry || self._aborted) { + // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted) + return } - get bufferLength () { return this[BUFFERLENGTH] } - - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') - - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) + self._index++ + if (self._index >= self.entries.length) { + if (!self._ended) { + self._ended = true + self.emit('end') + self.emit('close') } - - this[ENCODING] = enc - } - - setEncoding (enc) { - this.encoding = enc + return } - get objectMode () { return this[OBJECTMODE] } - set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } + // ok, handle this one, then. - get ['async'] () { return this[ASYNC] } - set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } + // save creating a proxy, by stat'ing the thing now. + var p = path.resolve(self._path, self.entries[self._index]) + assert(p !== self._path) + assert(self.entries[self._index]) - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') + // set this to prevent trying to _read() again in the stat time. + self._currentEntry = p + fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) { + if (er) return self.error(er) - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } + var who = self._proxy || self - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' + stat.path = p + stat.basename = path.basename(p) + stat.dirname = path.dirname(p) + var childProps = self.getChildProps.call(who, stat) + childProps.path = p + childProps.basename = path.basename(p) + childProps.dirname = path.dirname(p) - if (!encoding) - encoding = 'utf8' + var entry = Reader(childProps, stat) - const fn = this[ASYNC] ? defer : f => f() + // console.error("DR Entry", p, stat.size) - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } + self._currentEntry = entry - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? */ - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) + // "entry" events are for direct entries in a specific dir. 
+ // "child" events are for any and all children at all levels. + // This nomenclature is not completely final. - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) + entry.on('pause', function (who) { + if (!self._paused && !entry._disowned) { + self.pause(who) + } + }) - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') + entry.on('resume', function (who) { + if (self._paused && !entry._disowned) { + self.resume(who) + } + }) - if (cb) - fn(cb) + entry.on('stat', function (props) { + self.emit('_entryStat', entry, props) + if (entry._aborted) return + if (entry._paused) { + entry.once('resume', function () { + self.emit('entryStat', entry, props) + }) + } else self.emit('entryStat', entry, props) + }) - return this.flowing - } + entry.on('ready', function EMITCHILD () { + // console.error("DR emit child", entry._path) + if (self._paused) { + // console.error(" DR emit child - try again later") + // pause the child, and emit the "entry" event once we drain. + // console.error("DR pausing child entry") + entry.pause(self) + return self.once('resume', EMITCHILD) + } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - fn(cb) - return this.flowing - } + // skip over sockets. they can't be piped around properly, + // so there's really no sense even acknowledging them. + // if someone really wants to see them, they can listen to + // the "socket" events. + if (entry.type === 'Socket') { + self.emit('socket', entry) + } else { + self.emitEntry(entry) + } + }) - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) + var ended = false + entry.on('close', onend) + entry.on('disown', onend) + function onend () { + if (ended) return + ended = true + self.emit('childEnd', entry) + self.emit('entryEnd', entry) + self._currentEntry = null + if (!self._paused) { + self._read() + } } - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') + // XXX Remove this. Works in node as of 0.6.2 or so. + // Long filenames should not break stuff. + entry.on('error', function (er) { + if (entry._swallowErrors) { + self.warn(er) + entry.emit('end') + entry.emit('close') + } else { + self.emit('error', er) + } + }) - if (cb) - fn(cb) + // proxy up some events. 
+ ;[ + 'child', + 'childEnd', + 'warn' + ].forEach(function (ev) { + entry.on(ev, self.emit.bind(self, ev)) + }) + }) +} - return this.flowing +DirReader.prototype.disown = function (entry) { + entry.emit('beforeDisown') + entry._disowned = true + entry.parent = entry.root = null + if (entry === this._currentEntry) { + this._currentEntry = null } + entry.emit('disown') +} - read (n) { - if (this[DESTROYED]) - return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } +DirReader.prototype.getChildProps = function () { + return { + depth: this.depth + 1, + root: this.root || this, + parent: this, + follow: this.follow, + filter: this.filter, + sort: this.props.sort, + hardlinks: this.props.hardlinks + } +} - if (this[OBJECTMODE]) - n = null +DirReader.prototype.pause = function (who) { + var self = this + if (self._paused) return + who = who || self + self._paused = true + if (self._currentEntry && self._currentEntry.pause) { + self._currentEntry.pause(who) + } + self.emit('pause', who) +} - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = [this.buffer.join('')] - else - this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] - } +DirReader.prototype.resume = function (who) { + var self = this + if (!self._paused) return + who = who || self - const ret = this[READ](n || null, this.buffer[0]) - this[MAYBE_EMIT_END]() - return ret + self._paused = false + // console.error('DR Emit Resume', self._path) + self.emit('resume', who) + if (self._paused) { + // console.error('DR Re-paused', self._path) + return } - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer[0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } + if (self._currentEntry) { + if (self._currentEntry.resume) self._currentEntry.resume(who) + } else self._read() +} - this.emit('data', chunk) +DirReader.prototype.emitEntry = function (entry) { + this.emit('entry', entry) + this.emit('child', entry) +} - if (!this.buffer.length && !this[EOF]) - this.emit('drain') - return chunk - } +/***/ }), - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. - if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } +/***/ 178: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return +// It is expected that, when .add() returns false, the consumer +// of the DirWriter will pause until a "drain" event occurs. Note +// that this is *almost always going to be the case*, unless the +// thing being written is some sort of unsupported type, and thus +// skipped over. 
- this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } +module.exports = DirWriter - resume () { - return this[RESUME]() - } +var Writer = __nccwpck_require__(989) +var inherits = __nccwpck_require__(445) +var mkdir = __nccwpck_require__(6994) +var path = __nccwpck_require__(1017) +var collect = __nccwpck_require__(327) - pause () { - this[FLOWING] = false - this[PAUSED] = true - } +inherits(DirWriter, Writer) - get destroyed () { - return this[DESTROYED] +function DirWriter (props) { + var self = this + if (!(self instanceof DirWriter)) { + self.error('DirWriter must be called as constructor.', null, true) } - get flowing () { - return this[FLOWING] + // should already be established as a Directory type + if (props.type !== 'Directory' || !props.Directory) { + self.error('Non-directory type ' + props.type + ' ' + + JSON.stringify(props), null, true) } - get paused () { - return this[PAUSED] - } + Writer.call(this, props) +} - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - this.buffer.push(chunk) - } +DirWriter.prototype._create = function () { + var self = this + mkdir(self._path, Writer.dirmode, function (er) { + if (er) return self.error(er) + // ready to start getting entries! + self.ready = true + self.emit('ready') + self._process() + }) +} - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer[0].length - } - return this.buffer.shift() - } +// a DirWriter has an add(entry) method, but its .write() doesn't +// do anything. Why a no-op rather than a throw? Because this +// leaves open the door for writing directory metadata for +// gnu/solaris style dumpdirs. +DirWriter.prototype.write = function () { + return true +} - [FLUSH] (noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) +DirWriter.prototype.end = function () { + this._ended = true + this._process() +} - if (!noDrain && !this.buffer.length && !this[EOF]) - this.emit('drain') - } +DirWriter.prototype.add = function (entry) { + var self = this - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false + // console.error('\tadd', entry._path, '->', self._path) + collect(entry) + if (!self.ready || self._currentEntry) { + self._buffer.push(entry) + return false } - pipe (dest, opts) { - if (this[DESTROYED]) - return + // create a new writer, and pipe the incoming entry into it. + if (self._ended) { + return self.error('add after end') + } - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false - else - opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors + self._buffer.push(entry) + self._process() - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end() - } else { - this.pipes.push(!opts.proxyErrors ? 
new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)) - if (this[ASYNC]) - defer(() => this[RESUME]()) - else - this[RESUME]() - } + return this._buffer.length === 0 +} - return dest - } +DirWriter.prototype._process = function () { + var self = this - unpipe (dest) { - const p = this.pipes.find(p => p.dest === dest) - if (p) { - this.pipes.splice(this.pipes.indexOf(p), 1) - p.unpipe() - } - } + // console.error('DW Process p=%j', self._processing, self.basename) - addListener (ev, fn) { - return this.on(ev, fn) - } + if (self._processing) return - on (ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) - defer(() => fn.call(this, this[EMITTED_ERROR])) - else - fn.call(this, this[EMITTED_ERROR]) - } - return ret + var entry = self._buffer.shift() + if (!entry) { + // console.error("DW Drain") + self.emit('drain') + if (self._ended) self._finish() + return } - get emittedEnd () { - return this[EMITTED_END] - } + self._processing = true + // console.error("DW Entry", entry._path) - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false - } - } + self.emit('entry', entry) - emit (ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !data ? false - : this[ASYNC] ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - const ret = super.emit('error', data) - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret + // ok, add this entry + // + // don't allow recursive copying + var p = entry + var pp + do { + pp = p._path || p.path + if (pp === self.root._path || pp === self._path || + (pp && pp.indexOf(self._path) === 0)) { + // console.error('DW Exit (recursive)', entry.basename, self._path) + self._processing = false + if (entry._collected) entry.pipe() + return self._process() } + p = p.parent + } while (p) - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } + // console.error("DW not recursive") - [EMITDATA] (data) { - for (const p of this.pipes) { - if (p.dest.write(data) === false) - this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret + // chop off the entry's root dir, replace with ours + var props = { + parent: self, + root: self.root || self, + type: entry.type, + depth: self.depth + 1 } - [EMITEND] () { - if (this[EMITTED_END]) - return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) - defer(() => this[EMITEND2]()) - else - this[EMITEND2]() + pp = entry._path || entry.path || entry.props.path + if (entry.parent) { + pp = pp.substr(entry.parent._path.length + 1) } + // get rid of any ../../ shenanigans + props.path = path.join(self.path, path.join('/', pp)) - [EMITEND2] () { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this.pipes) { - p.dest.write(data) - } - super.emit('data', data) - } - } + // if i have a filter, the child should inherit it. + props.filter = self.filter - for (const p of this.pipes) { - p.end() + // all the rest of the stuff, copy over from the source. + Object.keys(entry.props).forEach(function (k) { + if (!props.hasOwnProperty(k)) { + props[k] = entry.props[k] } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } + }) - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) - } + // not sure at this point what kind of writer this is. + var child = self._currentChild = new Writer(props) + child.on('ready', function () { + // console.error("DW Child Ready", child.type, child._path) + // console.error(" resuming", entry._path) + entry.pipe(child) + entry.resume() + }) - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? 
buf.join('') : Buffer.concat(buf, buf.dataLength)) - } + // XXX Make this work in node. + // Long filenames should not break stuff. + child.on('error', function (er) { + if (child._swallowErrors) { + self.warn(er) + child.emit('end') + child.emit('close') + } else { + self.emit('error', er) + } + }) - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) + // we fire _end internally *after* end, so that we don't move on + // until any "end" listeners have had their chance to do stuff. + child.on('close', onend) + var ended = false + function onend () { + if (ended) return + ended = true + // console.error("* DW Child end", child.basename) + self._currentChild = null + self._processing = false + self._process() } +} - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) - if (this[EOF]) - return Promise.resolve({ done: true }) +/***/ }), - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } +/***/ 8331: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return { next } - } +// Basically just a wrapper around an fs.ReadStream - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } - } - return { next } - } +module.exports = FileReader - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this - } +var fs = __nccwpck_require__(3273) +var inherits = __nccwpck_require__(445) +var Reader = __nccwpck_require__(4116) +var EOF = {EOF: true} +var CLOSE = {CLOSE: true} - this[DESTROYED] = true +inherits(FileReader, Reader) - // throw away all buffered data, it's never coming out - this.buffer.length = 0 - this[BUFFERLENGTH] = 0 +function FileReader (props) { + // console.error(" FR create", props.path, props.size, new Error().stack) + var self = this + if (!(self instanceof FileReader)) { + throw new Error('FileReader must be called as constructor.') + } - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() + // should already be established as a File type + // XXX Todo: preserve hardlinks by tracking dev+inode+nlink, + // with a HardLinkReader class. 
+ if (!((props.type === 'Link' && props.Link) || + (props.type === 'File' && props.File))) { + throw new Error('Non-file type ' + props.type) + } - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) + self._buffer = [] + self._bytesEmitted = 0 + Reader.call(self, props) +} - return this - } +FileReader.prototype._getStream = function () { + var self = this + var stream = self._stream = fs.createReadStream(self._path, self.props) - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) + if (self.props.blksize) { + stream.bufferSize = self.props.blksize } -} + stream.on('open', self.emit.bind(self, 'open')) -/***/ }), + stream.on('data', function (c) { + // console.error('\t\t%d %s', c.length, self.basename) + self._bytesEmitted += c.length + // no point saving empty chunks + if (!c.length) { + return + } else if (self._paused || self._buffer.length) { + self._buffer.push(c) + self._read() + } else self.emit('data', c) + }) -/***/ 6863: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + stream.on('end', function () { + if (self._paused || self._buffer.length) { + // console.error('FR Buffering End', self._path) + self._buffer.push(EOF) + self._read() + } else { + self.emit('end') + } -module.exports = realpath -realpath.realpath = realpath -realpath.sync = realpathSync -realpath.realpathSync = realpathSync -realpath.monkeypatch = monkeypatch -realpath.unmonkeypatch = unmonkeypatch + if (self._bytesEmitted !== self.props.size) { + self.error("Didn't get expected byte count\n" + + 'expect: ' + self.props.size + '\n' + + 'actual: ' + self._bytesEmitted) + } + }) -var fs = __nccwpck_require__(7147) -var origRealpath = fs.realpath -var origRealpathSync = fs.realpathSync + stream.on('close', function () { + if (self._paused || self._buffer.length) { + // console.error('FR Buffering Close', self._path) + self._buffer.push(CLOSE) + self._read() + } else { + // console.error('FR close 1', self._path) + self.emit('close') + } + }) -var version = process.version -var ok = /^v[0-5]\./.test(version) -var old = __nccwpck_require__(1734) + stream.on('error', function (e) { + self.emit('error', e) + }) -function newError (er) { - return er && er.syscall === 'realpath' && ( - er.code === 'ELOOP' || - er.code === 'ENOMEM' || - er.code === 'ENAMETOOLONG' - ) + self._read() } -function realpath (p, cache, cb) { - if (ok) { - return origRealpath(p, cache, cb) +FileReader.prototype._read = function () { + var self = this + // console.error('FR _read', self._path) + if (self._paused) { + // console.error('FR _read paused', self._path) + return } - if (typeof cache === 'function') { - cb = cache - cache = null + if (!self._stream) { + // console.error('FR _getStream calling', self._path) + return self._getStream() } - origRealpath(p, cache, function (er, result) { - if (newError(er)) { - old.realpath(p, cache, cb) - } else { - cb(er, result) - } - }) -} -function realpathSync (p, cache) { - if (ok) { - return origRealpathSync(p, cache) - } + // clear out the buffer, if there is one. 
+ if (self._buffer.length) { + // console.error('FR _read has buffer', self._buffer.length, self._path) + var buf = self._buffer + for (var i = 0, l = buf.length; i < l; i++) { + var c = buf[i] + if (c === EOF) { + // console.error('FR Read emitting buffered end', self._path) + self.emit('end') + } else if (c === CLOSE) { + // console.error('FR Read emitting buffered close', self._path) + self.emit('close') + } else { + // console.error('FR Read emitting buffered data', self._path) + self.emit('data', c) + } - try { - return origRealpathSync(p, cache) - } catch (er) { - if (newError(er)) { - return old.realpathSync(p, cache) - } else { - throw er + if (self._paused) { + // console.error('FR Read Re-pausing at '+i, self._path) + self._buffer = buf.slice(i) + return + } } + self._buffer.length = 0 } +// console.error("FR _read done") +// that's about all there is to it. } -function monkeypatch () { - fs.realpath = realpath - fs.realpathSync = realpathSync +FileReader.prototype.pause = function (who) { + var self = this + // console.error('FR Pause', self._path) + if (self._paused) return + who = who || self + self._paused = true + if (self._stream) self._stream.pause() + self.emit('pause', who) } -function unmonkeypatch () { - fs.realpath = origRealpath - fs.realpathSync = origRealpathSync +FileReader.prototype.resume = function (who) { + var self = this + // console.error('FR Resume', self._path) + if (!self._paused) return + who = who || self + self.emit('resume', who) + self._paused = false + if (self._stream) self._stream.resume() + self._read() } /***/ }), -/***/ 1734: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var pathModule = __nccwpck_require__(1017); -var isWindows = process.platform === 'win32'; -var fs = __nccwpck_require__(7147); - -// JavaScript implementation of realpath, ported from node pre-v6 +/***/ 3009: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); +module.exports = FileWriter -function rethrow() { - // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and - // is fairly slow to generate. 
- var callback; - if (DEBUG) { - var backtrace = new Error; - callback = debugCallback; - } else - callback = missingCallback; +var fs = __nccwpck_require__(3273) +var Writer = __nccwpck_require__(989) +var inherits = __nccwpck_require__(445) +var EOF = {} - return callback; +inherits(FileWriter, Writer) - function debugCallback(err) { - if (err) { - backtrace.message = err.message; - err = backtrace; - missingCallback(err); - } +function FileWriter (props) { + var self = this + if (!(self instanceof FileWriter)) { + throw new Error('FileWriter must be called as constructor.') } - function missingCallback(err) { - if (err) { - if (process.throwDeprecation) - throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs - else if (!process.noDeprecation) { - var msg = 'fs: missing callback ' + (err.stack || err.message); - if (process.traceDeprecation) - console.trace(msg); - else - console.error(msg); - } - } + // should already be established as a File type + if (props.type !== 'File' || !props.File) { + throw new Error('Non-file type ' + props.type) } -} -function maybeCallback(cb) { - return typeof cb === 'function' ? cb : rethrow(); + self._buffer = [] + self._bytesWritten = 0 + + Writer.call(this, props) } -var normalize = pathModule.normalize; +FileWriter.prototype._create = function () { + var self = this + if (self._stream) return -// Regexp that finds the next partion of a (partial) path -// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] -if (isWindows) { - var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; -} else { - var nextPartRe = /(.*?)(?:[\/]+|$)/g; -} + var so = {} + if (self.props.flags) so.flags = self.props.flags + so.mode = Writer.filemode + if (self._old && self._old.blksize) so.bufferSize = self._old.blksize -// Regex to find the device root, including trailing slash. E.g. 'c:\\'. -if (isWindows) { - var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; -} else { - var splitRootRe = /^[\/]*/; -} + self._stream = fs.createWriteStream(self._path, so) -exports.realpathSync = function realpathSync(p, cache) { - // make p is absolute - p = pathModule.resolve(p); + self._stream.on('open', function () { + // console.error("FW open", self._buffer, self._path) + self.ready = true + self._buffer.forEach(function (c) { + if (c === EOF) self._stream.end() + else self._stream.write(c) + }) + self.emit('ready') + // give this a kick just in case it needs it. + self.emit('drain') + }) - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return cache[p]; - } + self._stream.on('error', function (er) { self.emit('error', er) }) - var original = p, - seenLinks = {}, - knownHard = {}; + self._stream.on('drain', function () { self.emit('drain') }) - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; + self._stream.on('close', function () { + // console.error('\n\nFW Stream Close', self._path, self.size) + self._finish() + }) +} - start(); +FileWriter.prototype.write = function (c) { + var self = this - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; + self._bytesWritten += c.length - // On windows, check that the root exists. On unix there is no need. 
- if (isWindows && !knownHard[base]) { - fs.lstatSync(base); - knownHard[base] = true; + if (!self.ready) { + if (!Buffer.isBuffer(c) && typeof c !== 'string') { + throw new Error('invalid write data') } + self._buffer.push(c) + return false } - // walk down the path, swapping out linked pathparts for their real - // values - // NB: p.length changes. - while (pos < p.length) { - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; + var ret = self._stream.write(c) + // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length) - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - continue; - } + // allow 2 buffered writes, because otherwise there's just too + // much stop and go bs. + if (ret === false && self._stream._queue) { + return self._stream._queue.length <= 2 + } else { + return ret + } +} - var resolvedLink; - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // some known symbolic link. no need to stat again. - resolvedLink = cache[base]; - } else { - var stat = fs.lstatSync(base); - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - continue; - } +FileWriter.prototype.end = function (c) { + var self = this - // read the link if it wasn't read before - // dev/ino always return 0 on windows, so skip the check. - var linkTarget = null; - if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - linkTarget = seenLinks[id]; - } - } - if (linkTarget === null) { - fs.statSync(base); - linkTarget = fs.readlinkSync(base); - } - resolvedLink = pathModule.resolve(previous, linkTarget); - // track this, if given a cache. 
- if (cache) cache[base] = resolvedLink; - if (!isWindows) seenLinks[id] = linkTarget; - } + if (c) self.write(c) - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); + if (!self.ready) { + self._buffer.push(EOF) + return false } - if (cache) cache[original] = p; - - return p; -}; - + return self._stream.end() +} -exports.realpath = function realpath(p, cache, cb) { - if (typeof cb !== 'function') { - cb = maybeCallback(cache); - cache = null; +FileWriter.prototype._finish = function () { + var self = this + if (typeof self.size === 'number' && self._bytesWritten !== self.size) { + self.error( + 'Did not get expected byte count.\n' + + 'expect: ' + self.size + '\n' + + 'actual: ' + self._bytesWritten) } + Writer.prototype._finish.call(self) +} - // make p is absolute - p = pathModule.resolve(p); - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return process.nextTick(cb.bind(null, null, cache[p])); - } +/***/ }), - var original = p, - seenLinks = {}, - knownHard = {}; +/***/ 9695: +/***/ ((module) => { - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; +module.exports = getType - start(); +function getType (st) { + var types = [ + 'Directory', + 'File', + 'SymbolicLink', + 'Link', // special for hardlinks from tarballs + 'BlockDevice', + 'CharacterDevice', + 'FIFO', + 'Socket' + ] + var type - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; + if (st.type && types.indexOf(st.type) !== -1) { + st[st.type] = true + return st.type + } - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstat(base, function(err) { - if (err) return cb(err); - knownHard[base] = true; - LOOP(); - }); - } else { - process.nextTick(LOOP); + for (var i = 0, l = types.length; i < l; i++) { + type = types[i] + var is = st[type] || st['is' + type] + if (typeof is === 'function') is = is.call(st) + if (is) { + st[type] = true + st.type = type + return type } } - // walk down the path, swapping out linked pathparts for their real - // values - function LOOP() { - // stop if scanned past end of path - if (pos >= p.length) { - if (cache) cache[original] = p; - return cb(null, p); - } + return null +} - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - return process.nextTick(LOOP); - } +/***/ }), + +/***/ 3773: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Basically just a wrapper around an fs.readlink +// +// XXX: Enhance this to support the Link type, by keeping +// a lookup table of {:}, so that hardlinks +// can be preserved in tarballs. + +module.exports = LinkReader - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // known symbolic link. no need to stat again. 
- return gotResolvedLink(cache[base]); - } +var fs = __nccwpck_require__(3273) +var inherits = __nccwpck_require__(445) +var Reader = __nccwpck_require__(4116) - return fs.lstat(base, gotStat); +inherits(LinkReader, Reader) + +function LinkReader (props) { + var self = this + if (!(self instanceof LinkReader)) { + throw new Error('LinkReader must be called as constructor.') } - function gotStat(err, stat) { - if (err) return cb(err); + if (!((props.type === 'Link' && props.Link) || + (props.type === 'SymbolicLink' && props.SymbolicLink))) { + throw new Error('Non-link type ' + props.type) + } - // if not a symlink, skip to the next path part - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - return process.nextTick(LOOP); - } + Reader.call(self, props) +} - // stat & read the link if not read before - // call gotTarget as soon as the link target is known - // dev/ino always return 0 on windows, so skip the check. - if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - return gotTarget(null, seenLinks[id], base); - } - } - fs.stat(base, function(err) { - if (err) return cb(err); +// When piping a LinkReader into a LinkWriter, we have to +// already have the linkpath property set, so that has to +// happen *before* the "ready" event, which means we need to +// override the _stat method. +LinkReader.prototype._stat = function (currentStat) { + var self = this + fs.readlink(self._path, function (er, linkpath) { + if (er) return self.error(er) + self.linkpath = self.props.linkpath = linkpath + self.emit('linkpath', linkpath) + Reader.prototype._stat.call(self, currentStat) + }) +} - fs.readlink(base, function(err, target) { - if (!isWindows) seenLinks[id] = target; - gotTarget(err, target); - }); - }); +LinkReader.prototype._read = function () { + var self = this + if (self._paused) return + // basically just a no-op, since we got all the info we need + // from the _stat method + if (!self._ended) { + self.emit('end') + self.emit('close') + self._ended = true } +} - function gotTarget(err, target, base) { - if (err) return cb(err); - var resolvedLink = pathModule.resolve(previous, target); - if (cache) cache[base] = resolvedLink; - gotResolvedLink(resolvedLink); +/***/ }), + +/***/ 147: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = LinkWriter + +var fs = __nccwpck_require__(3273) +var Writer = __nccwpck_require__(989) +var inherits = __nccwpck_require__(445) +var path = __nccwpck_require__(1017) +var rimraf = __nccwpck_require__(5060) + +inherits(LinkWriter, Writer) + +function LinkWriter (props) { + var self = this + if (!(self instanceof LinkWriter)) { + throw new Error('LinkWriter must be called as constructor.') } - function gotResolvedLink(resolvedLink) { - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); + // should already be established as a Link type + if (!((props.type === 'Link' && props.Link) || + (props.type === 'SymbolicLink' && props.SymbolicLink))) { + throw new Error('Non-link type ' + props.type) } -}; + if (props.linkpath === '') props.linkpath = '.' 
+ if (!props.linkpath) { + self.error('Need linkpath property to create ' + props.type) + } -/***/ }), + Writer.call(this, props) +} -/***/ 7158: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +LinkWriter.prototype._create = function () { + // console.error(" LW _create") + var self = this + var hard = self.type === 'Link' || process.platform === 'win32' + var link = hard ? 'link' : 'symlink' + var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath -var __webpack_unused_export__; -/* unused reexport */ __nccwpck_require__(9479) -/* unused reexport */ __nccwpck_require__(3284) -exports.Writer = __nccwpck_require__(8680) + // can only change the link path by clobbering + // For hard links, let's just assume that's always the case, since + // there's no good way to read them if we don't already know. + if (hard) return clobber(self, lp, link) -exports.$B = { - Reader: __nccwpck_require__(8413), - Writer: __nccwpck_require__(2539) + fs.readlink(self._path, function (er, p) { + // only skip creation if it's exactly the same link + if (p && p === lp) return finish(self) + clobber(self, lp, link) + }) } -exports.Lv = { - Reader: __nccwpck_require__(4486), - Writer: __nccwpck_require__(4745) +function clobber (self, lp, link) { + rimraf(self._path, function (er) { + if (er) return self.error(er) + create(self, lp, link) + }) } -exports.rU = { - Reader: __nccwpck_require__(8337), - Writer: __nccwpck_require__(404) +function create (self, lp, link) { + fs[link](lp, self._path, function (er) { + // if this is a hard link, and we're in the process of writing out a + // directory, it's very possible that the thing we're linking to + // doesn't exist yet (especially if it was intended as a symlink), + // so swallow ENOENT errors here and just soldier in. + // Additionally, an EPERM or EACCES can happen on win32 if it's trying + // to make a link to a directory. Again, just skip it. + // A better solution would be to have fs.symlink be supported on + // windows in some nice fashion. + if (er) { + if ((er.code === 'ENOENT' || + er.code === 'EACCES' || + er.code === 'EPERM') && process.platform === 'win32') { + self.ready = true + self.emit('ready') + self.emit('end') + self.emit('close') + self.end = self._finish = function () {} + } else return self.error(er) + } + finish(self) + }) } -exports._S = { - Reader: __nccwpck_require__(7328), - Writer: __nccwpck_require__(2071) +function finish (self) { + self.ready = true + self.emit('ready') + if (self._ended && !self._finished) self._finish() } -__webpack_unused_export__ = __webpack_unused_export__ = exports.Lv.Reader -__webpack_unused_export__ = __webpack_unused_export__ = exports.$B.Reader -__webpack_unused_export__ = __webpack_unused_export__ = exports.rU.Reader -__webpack_unused_export__ = __webpack_unused_export__ = exports._S.Reader - -exports.Writer.Dir = __webpack_unused_export__ = exports.Lv.Writer -exports.Writer.File = __webpack_unused_export__ = exports.$B.Writer -exports.Writer.Link = __webpack_unused_export__ = exports.rU.Writer -exports.Writer.Proxy = __webpack_unused_export__ = exports._S.Writer - -/* unused reexport */ __nccwpck_require__(3317) +LinkWriter.prototype.end = function () { + // console.error("LW finish in end") + this._ended = true + if (this.ready) { + this._finished = true + this._finish() + } +} /***/ }), -/***/ 9479: +/***/ 7620: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// the parent class for all fstreams. 
- -module.exports = Abstract +// A reader for when we don't yet know what kind of thing +// the thing is. -var Stream = (__nccwpck_require__(2781).Stream) -var inherits = __nccwpck_require__(4124) +module.exports = ProxyReader -function Abstract () { - Stream.call(this) -} +var Reader = __nccwpck_require__(4116) +var getType = __nccwpck_require__(9695) +var inherits = __nccwpck_require__(445) +var fs = __nccwpck_require__(3273) -inherits(Abstract, Stream) +inherits(ProxyReader, Reader) -Abstract.prototype.on = function (ev, fn) { - if (ev === 'ready' && this.ready) { - process.nextTick(fn.bind(this)) - } else { - Stream.prototype.on.call(this, ev, fn) +function ProxyReader (props) { + var self = this + if (!(self instanceof ProxyReader)) { + throw new Error('ProxyReader must be called as constructor.') } - return this -} -Abstract.prototype.abort = function () { - this._aborted = true - this.emit('abort') + self.props = props + self._buffer = [] + self.ready = false + + Reader.call(self, props) } -Abstract.prototype.destroy = function () {} +ProxyReader.prototype._stat = function () { + var self = this + var props = self.props + // stat the thing to see what the proxy should be. + var stat = props.follow ? 'stat' : 'lstat' -Abstract.prototype.warn = function (msg, code) { + fs[stat](props.path, function (er, current) { + var type + if (er || !current) { + type = 'File' + } else { + type = getType(current) + } + + props[type] = true + props.type = self.type = type + + self._old = current + self._addProxy(Reader(props, current)) + }) +} + +ProxyReader.prototype._addProxy = function (proxy) { var self = this - var er = decorate(msg, code, self) - if (!self.listeners('warn')) { - console.error('%s %s\n' + - 'path = %s\n' + - 'syscall = %s\n' + - 'fstream_type = %s\n' + - 'fstream_path = %s\n' + - 'fstream_unc_path = %s\n' + - 'fstream_class = %s\n' + - 'fstream_stack =\n%s\n', - code || 'UNKNOWN', - er.stack, - er.path, - er.syscall, - er.fstream_type, - er.fstream_path, - er.fstream_unc_path, - er.fstream_class, - er.fstream_stack.join('\n')) - } else { - self.emit('warn', er) + if (self._proxyTarget) { + return self.error('proxy already set') } -} -Abstract.prototype.info = function (msg, code) { - this.emit('info', msg, code) -} + self._proxyTarget = proxy + proxy._proxy = self -Abstract.prototype.error = function (msg, code, th) { - var er = decorate(msg, code, this) - if (th) throw er - else this.emit('error', er) + ;[ + 'error', + 'data', + 'end', + 'close', + 'linkpath', + 'entry', + 'entryEnd', + 'child', + 'childEnd', + 'warn', + 'stat' + ].forEach(function (ev) { + // console.error('~~ proxy event', ev, self.path) + proxy.on(ev, self.emit.bind(self, ev)) + }) + + self.emit('proxy', proxy) + + proxy.on('ready', function () { + // console.error("~~ proxy is ready!", self.path) + self.ready = true + self.emit('ready') + }) + + var calls = self._buffer + self._buffer.length = 0 + calls.forEach(function (c) { + proxy[c[0]].apply(proxy, c[1]) + }) } -function decorate (er, code, self) { - if (!(er instanceof Error)) er = new Error(er) - er.code = er.code || code - er.path = er.path || self.path - er.fstream_type = er.fstream_type || self.type - er.fstream_path = er.fstream_path || self.path - if (self._path !== self.path) { - er.fstream_unc_path = er.fstream_unc_path || self._path - } - if (self.linkpath) { - er.fstream_linkpath = er.fstream_linkpath || self.linkpath - } - er.fstream_class = er.fstream_class || self.constructor.name - er.fstream_stack = er.fstream_stack || - new 
Error().stack.split(/\n/).slice(3).map(function (s) { - return s.replace(/^ {4}at /, '') - }) +ProxyReader.prototype.pause = function () { + return this._proxyTarget ? this._proxyTarget.pause() : false +} - return er +ProxyReader.prototype.resume = function () { + return this._proxyTarget ? this._proxyTarget.resume() : false } /***/ }), -/***/ 3317: -/***/ ((module) => { +/***/ 4943: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = collect +// A writer for when we don't know what kind of thing +// the thing is. That is, it's not explicitly set, +// so we're going to make it whatever the thing already +// is, or "File" +// +// Until then, collect all events. -function collect (stream) { - if (stream._collected) return +module.exports = ProxyWriter - if (stream._paused) return stream.on('resume', collect.bind(null, stream)) +var Writer = __nccwpck_require__(989) +var getType = __nccwpck_require__(9695) +var inherits = __nccwpck_require__(445) +var collect = __nccwpck_require__(327) +var fs = __nccwpck_require__(7147) - stream._collected = true - stream.pause() +inherits(ProxyWriter, Writer) - stream.on('data', save) - stream.on('end', save) - var buf = [] - function save (b) { - if (typeof b === 'string') b = new Buffer(b) - if (Buffer.isBuffer(b) && !b.length) return - buf.push(b) +function ProxyWriter (props) { + var self = this + if (!(self instanceof ProxyWriter)) { + throw new Error('ProxyWriter must be called as constructor.') } - stream.on('entry', saveEntry) - var entryBuffer = [] - function saveEntry (e) { - collect(e) - entryBuffer.push(e) - } + self.props = props + self._needDrain = false - stream.on('proxy', proxyPause) - function proxyPause (p) { - p.pause() + Writer.call(self, props) +} + +ProxyWriter.prototype._stat = function () { + var self = this + var props = self.props + // stat the thing to see what the proxy should be. + var stat = props.follow ? 'stat' : 'lstat' + + fs[stat](props.path, function (er, current) { + var type + if (er || !current) { + type = 'File' + } else { + type = getType(current) + } + + props[type] = true + props.type = self.type = type + + self._old = current + self._addProxy(Writer(props, current)) + }) +} + +ProxyWriter.prototype._addProxy = function (proxy) { + // console.error("~~ set proxy", this.path) + var self = this + if (self._proxy) { + return self.error('proxy already set') } - // replace the pipe method with a new version that will - // unlock the buffered stuff. if you just call .pipe() - // without a destination, then it'll re-play the events. - stream.pipe = (function (orig) { - return function (dest) { - // console.error(' === open the pipes', dest && dest.path) + self._proxy = proxy + ;[ + 'ready', + 'error', + 'close', + 'pipe', + 'drain', + 'warn' + ].forEach(function (ev) { + proxy.on(ev, self.emit.bind(self, ev)) + }) - // let the entries flow through one at a time. - // Once they're all done, then we can resume completely. 
- var e = 0 - ;(function unblockEntry () { - var entry = entryBuffer[e++] - // console.error(" ==== unblock entry", entry && entry.path) - if (!entry) return resume() - entry.on('end', unblockEntry) - if (dest) dest.add(entry) - else stream.emit('entry', entry) - })() + self.emit('proxy', proxy) - function resume () { - stream.removeListener('entry', saveEntry) - stream.removeListener('data', save) - stream.removeListener('end', save) + var calls = self._buffer + calls.forEach(function (c) { + // console.error("~~ ~~ proxy buffered call", c[0], c[1]) + proxy[c[0]].apply(proxy, c[1]) + }) + self._buffer.length = 0 + if (self._needsDrain) self.emit('drain') +} - stream.pipe = orig - if (dest) stream.pipe(dest) +ProxyWriter.prototype.add = function (entry) { + // console.error("~~ proxy add") + collect(entry) - buf.forEach(function (b) { - if (b) stream.emit('data', b) - else stream.emit('end') - }) + if (!this._proxy) { + this._buffer.push(['add', [entry]]) + this._needDrain = true + return false + } + return this._proxy.add(entry) +} - stream.resume() - } +ProxyWriter.prototype.write = function (c) { + // console.error('~~ proxy write') + if (!this._proxy) { + this._buffer.push(['write', [c]]) + this._needDrain = true + return false + } + return this._proxy.write(c) +} - return dest - } - })(stream.pipe) +ProxyWriter.prototype.end = function (c) { + // console.error('~~ proxy end') + if (!this._proxy) { + this._buffer.push(['end', [c]]) + return false + } + return this._proxy.end(c) } /***/ }), -/***/ 4486: +/***/ 4116: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// A thing that emits "entry" events with Reader objects -// Pausing it causes it to stop emitting entry events, and also -// pauses the current entry if there is one. - -module.exports = DirReader +module.exports = Reader -var fs = __nccwpck_require__(7758) -var inherits = __nccwpck_require__(4124) +var fs = __nccwpck_require__(3273) +var Stream = (__nccwpck_require__(2781).Stream) +var inherits = __nccwpck_require__(445) var path = __nccwpck_require__(1017) -var Reader = __nccwpck_require__(3284) -var assert = (__nccwpck_require__(9491).ok) - -inherits(DirReader, Reader) +var getType = __nccwpck_require__(9695) +var hardLinks = Reader.hardLinks = {} +var Abstract = __nccwpck_require__(3652) -function DirReader (props) { - var self = this - if (!(self instanceof DirReader)) { - throw new Error('DirReader must be called as constructor.') - } +// Must do this *before* loading the child classes +inherits(Reader, Abstract) - // should already be established as a Directory type - if (props.type !== 'Directory' || !props.Directory) { - throw new Error('Non-directory type ' + props.type) - } +var LinkReader = __nccwpck_require__(3773) - self.entries = null - self._index = -1 - self._paused = false - self._length = -1 +function Reader (props, currentStat) { + var self = this + if (!(self instanceof Reader)) return new Reader(props, currentStat) - if (props.sort) { - this.sort = props.sort + if (typeof props === 'string') { + props = { path: props } } - Reader.call(this, props) -} + // polymorphism. + // call fstream.Reader(dir) to get a DirReader object, etc. + // Note that, unlike in the Writer case, ProxyReader is going + // to be the *normal* state of affairs, since we rarely know + // the type of a file prior to reading it. -DirReader.prototype._getEntries = function () { - var self = this + var type + var ClassType - // race condition. 
might pause() before calling _getEntries, - // and then resume, and try to get them a second time. - if (self._gotEntries) return - self._gotEntries = true + if (props.type && typeof props.type === 'function') { + type = props.type + ClassType = type + } else { + type = getType(props) + ClassType = Reader + } - fs.readdir(self._path, function (er, entries) { - if (er) return self.error(er) + if (currentStat && !type) { + type = getType(currentStat) + props[type] = true + props.type = type + } - self.entries = entries + switch (type) { + case 'Directory': + ClassType = __nccwpck_require__(2983) + break - self.emit('entries', entries) - if (self._paused) self.once('resume', processEntries) - else processEntries() + case 'Link': + // XXX hard links are just files. + // However, it would be good to keep track of files' dev+inode + // and nlink values, and create a HardLinkReader that emits + // a linkpath value of the original copy, so that the tar + // writer can preserve them. + // ClassType = HardLinkReader + // break - function processEntries () { - self._length = self.entries.length - if (typeof self.sort === 'function') { - self.entries = self.entries.sort(self.sort.bind(self)) - } - self._read() - } - }) -} + case 'File': + ClassType = __nccwpck_require__(8331) + break -// start walking the dir, and emit an "entry" event for each one. -DirReader.prototype._read = function () { - var self = this + case 'SymbolicLink': + ClassType = LinkReader + break - if (!self.entries) return self._getEntries() + case 'Socket': + ClassType = __nccwpck_require__(9630) + break - if (self._paused || self._currentEntry || self._aborted) { - // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted) - return + case null: + ClassType = __nccwpck_require__(7620) + break } - self._index++ - if (self._index >= self.entries.length) { - if (!self._ended) { - self._ended = true - self.emit('end') - self.emit('close') - } - return + if (!(self instanceof ClassType)) { + return new ClassType(props) } - // ok, handle this one, then. + Abstract.call(self) - // save creating a proxy, by stat'ing the thing now. - var p = path.resolve(self._path, self.entries[self._index]) - assert(p !== self._path) - assert(self.entries[self._index]) + if (!props.path) { + self.error('Must provide a path', null, true) + } - // set this to prevent trying to _read() again in the stat time. - self._currentEntry = p - fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) { - if (er) return self.error(er) + self.readable = true + self.writable = false - var who = self._proxy || self + self.type = type + self.props = props + self.depth = props.depth = props.depth || 0 + self.parent = props.parent || null + self.root = props.root || (props.parent && props.parent.root) || self - stat.path = p - stat.basename = path.basename(p) - stat.dirname = path.dirname(p) - var childProps = self.getChildProps.call(who, stat) - childProps.path = p - childProps.basename = path.basename(p) - childProps.dirname = path.dirname(p) + self._path = self.path = path.resolve(props.path) + if (process.platform === 'win32') { + self.path = self._path = self.path.replace(/\?/g, '_') + if (self._path.length >= 260) { + // how DOES one create files on the moon? + // if the path has spaces in it, then UNC will fail. 
+ self._swallowErrors = true + // if (self._path.indexOf(" ") === -1) { + self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') + // } + } + } + self.basename = props.basename = path.basename(self.path) + self.dirname = props.dirname = path.dirname(self.path) - var entry = Reader(childProps, stat) + // these have served their purpose, and are now just noisy clutter + props.parent = props.root = null - // console.error("DR Entry", p, stat.size) + // console.error("\n\n\n%s setting size to", props.path, props.size) + self.size = props.size + self.filter = typeof props.filter === 'function' ? props.filter : null + if (props.sort === 'alpha') props.sort = alphasort - self._currentEntry = entry + // start the ball rolling. + // this will stat the thing, and then call self._read() + // to start reading whatever it is. + // console.error("calling stat", props.path, currentStat) + self._stat(currentStat) +} - // "entry" events are for direct entries in a specific dir. - // "child" events are for any and all children at all levels. - // This nomenclature is not completely final. +function alphasort (a, b) { + return a === b ? 0 + : a.toLowerCase() > b.toLowerCase() ? 1 + : a.toLowerCase() < b.toLowerCase() ? -1 + : a > b ? 1 + : -1 +} - entry.on('pause', function (who) { - if (!self._paused && !entry._disowned) { - self.pause(who) - } - }) +Reader.prototype._stat = function (currentStat) { + var self = this + var props = self.props + var stat = props.follow ? 'stat' : 'lstat' + // console.error("Reader._stat", self._path, currentStat) + if (currentStat) process.nextTick(statCb.bind(null, null, currentStat)) + else fs[stat](self._path, statCb) - entry.on('resume', function (who) { - if (self._paused && !entry._disowned) { - self.resume(who) - } - }) + function statCb (er, props_) { + // console.error("Reader._stat, statCb", self._path, props_, props_.nlink) + if (er) return self.error(er) - entry.on('stat', function (props) { - self.emit('_entryStat', entry, props) - if (entry._aborted) return - if (entry._paused) { - entry.once('resume', function () { - self.emit('entryStat', entry, props) - }) - } else self.emit('entryStat', entry, props) + Object.keys(props_).forEach(function (k) { + props[k] = props_[k] }) - entry.on('ready', function EMITCHILD () { - // console.error("DR emit child", entry._path) - if (self._paused) { - // console.error(" DR emit child - try again later") - // pause the child, and emit the "entry" event once we drain. - // console.error("DR pausing child entry") - entry.pause(self) - return self.once('resume', EMITCHILD) - } + // if it's not the expected size, then abort here. + if (undefined !== self.size && props.size !== self.size) { + return self.error('incorrect size') + } + self.size = props.size - // skip over sockets. they can't be piped around properly, - // so there's really no sense even acknowledging them. - // if someone really wants to see them, they can listen to - // the "socket" events. - if (entry.type === 'Socket') { - self.emit('socket', entry) + var type = getType(props) + var handleHardlinks = props.hardlinks !== false + + // special little thing for handling hardlinks. + if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) { + var k = props.dev + ':' + props.ino + // console.error("Reader has nlink", self._path, k) + if (hardLinks[k] === self._path || !hardLinks[k]) { + hardLinks[k] = self._path } else { - self.emitEntry(entry) + // switch into hardlink mode. 
+ type = self.type = self.props.type = 'Link' + self.Link = self.props.Link = true + self.linkpath = self.props.linkpath = hardLinks[k] + // console.error("Hardlink detected, switching mode", self._path, self.linkpath) + // Setting __proto__ would arguably be the "correct" + // approach here, but that just seems too wrong. + self._stat = self._read = LinkReader.prototype._read } - }) + } - var ended = false - entry.on('close', onend) - entry.on('disown', onend) - function onend () { - if (ended) return - ended = true - self.emit('childEnd', entry) - self.emit('entryEnd', entry) - self._currentEntry = null - if (!self._paused) { - self._read() - } + if (self.type && self.type !== type) { + self.error('Unexpected type: ' + type) } - // XXX Remove this. Works in node as of 0.6.2 or so. - // Long filenames should not break stuff. - entry.on('error', function (er) { - if (entry._swallowErrors) { - self.warn(er) - entry.emit('end') - entry.emit('close') - } else { - self.emit('error', er) + // if the filter doesn't pass, then just skip over this one. + // still have to emit end so that dir-walking can move on. + if (self.filter) { + var who = self._proxy || self + // special handling for ProxyReaders + if (!self.filter.call(who, who, props)) { + if (!self._disowned) { + self.abort() + self.emit('end') + self.emit('close') + } + return } - }) + } - // proxy up some events. - ;[ - 'child', - 'childEnd', - 'warn' - ].forEach(function (ev) { - entry.on(ev, self.emit.bind(self, ev)) - }) - }) -} + // last chance to abort or disown before the flow starts! + var events = ['_stat', 'stat', 'ready'] + var e = 0 + ;(function go () { + if (self._aborted) { + self.emit('end') + self.emit('close') + return + } -DirReader.prototype.disown = function (entry) { - entry.emit('beforeDisown') - entry._disowned = true - entry.parent = entry.root = null - if (entry === this._currentEntry) { - this._currentEntry = null - } - entry.emit('disown') -} + if (self._paused && self.type !== 'Directory') { + self.once('resume', go) + return + } -DirReader.prototype.getChildProps = function () { - return { - depth: this.depth + 1, - root: this.root || this, - parent: this, - follow: this.follow, - filter: this.filter, - sort: this.props.sort, - hardlinks: this.props.hardlinks + var ev = events[e++] + if (!ev) { + return self._read() + } + self.emit(ev, props) + go() + })() } } -DirReader.prototype.pause = function (who) { +Reader.prototype.pipe = function (dest) { var self = this - if (self._paused) return - who = who || self - self._paused = true - if (self._currentEntry && self._currentEntry.pause) { - self._currentEntry.pause(who) + if (typeof dest.add === 'function') { + // piping to a multi-compatible, and we've got directory entries. 
+ self.on('entry', function (entry) { + var ret = dest.add(entry) + if (ret === false) { + self.pause() + } + }) } - self.emit('pause', who) -} -DirReader.prototype.resume = function (who) { - var self = this - if (!self._paused) return - who = who || self + // console.error("R Pipe apply Stream Pipe") + return Stream.prototype.pipe.apply(this, arguments) +} - self._paused = false - // console.error('DR Emit Resume', self._path) - self.emit('resume', who) - if (self._paused) { - // console.error('DR Re-paused', self._path) - return - } +Reader.prototype.pause = function (who) { + this._paused = true + who = who || this + this.emit('pause', who) + if (this._stream) this._stream.pause(who) +} - if (self._currentEntry) { - if (self._currentEntry.resume) self._currentEntry.resume(who) - } else self._read() +Reader.prototype.resume = function (who) { + this._paused = false + who = who || this + this.emit('resume', who) + if (this._stream) this._stream.resume(who) + this._read() } -DirReader.prototype.emitEntry = function (entry) { - this.emit('entry', entry) - this.emit('child', entry) +Reader.prototype._read = function () { + this.error('Cannot read unknown type: ' + this.type) } /***/ }), -/***/ 4745: +/***/ 9630: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// It is expected that, when .add() returns false, the consumer -// of the DirWriter will pause until a "drain" event occurs. Note -// that this is *almost always going to be the case*, unless the -// thing being written is some sort of unsupported type, and thus -// skipped over. +// Just get the stats, and then don't do anything. +// You can't really "read" from a socket. You "connect" to it. +// Mostly, this is here so that reading a dir with a socket in it +// doesn't blow up. -module.exports = DirWriter +module.exports = SocketReader -var Writer = __nccwpck_require__(8680) -var inherits = __nccwpck_require__(4124) -var mkdir = __nccwpck_require__(7812) -var path = __nccwpck_require__(1017) -var collect = __nccwpck_require__(3317) +var inherits = __nccwpck_require__(445) +var Reader = __nccwpck_require__(4116) -inherits(DirWriter, Writer) +inherits(SocketReader, Reader) -function DirWriter (props) { +function SocketReader (props) { var self = this - if (!(self instanceof DirWriter)) { - self.error('DirWriter must be called as constructor.', null, true) + if (!(self instanceof SocketReader)) { + throw new Error('SocketReader must be called as constructor.') } - // should already be established as a Directory type - if (props.type !== 'Directory' || !props.Directory) { - self.error('Non-directory type ' + props.type + ' ' + - JSON.stringify(props), null, true) + if (!(props.type === 'Socket' && props.Socket)) { + throw new Error('Non-socket type ' + props.type) } - Writer.call(this, props) -} - -DirWriter.prototype._create = function () { - var self = this - mkdir(self._path, Writer.dirmode, function (er) { - if (er) return self.error(er) - // ready to start getting entries! - self.ready = true - self.emit('ready') - self._process() - }) -} - -// a DirWriter has an add(entry) method, but its .write() doesn't -// do anything. Why a no-op rather than a throw? Because this -// leaves open the door for writing directory metadata for -// gnu/solaris style dumpdirs. 
-DirWriter.prototype.write = function () { - return true -} - -DirWriter.prototype.end = function () { - this._ended = true - this._process() + Reader.call(self, props) } -DirWriter.prototype.add = function (entry) { +SocketReader.prototype._read = function () { var self = this - - // console.error('\tadd', entry._path, '->', self._path) - collect(entry) - if (!self.ready || self._currentEntry) { - self._buffer.push(entry) - return false - } - - // create a new writer, and pipe the incoming entry into it. - if (self._ended) { - return self.error('add after end') + if (self._paused) return + // basically just a no-op, since we got all the info we have + // from the _stat method + if (!self._ended) { + self.emit('end') + self.emit('close') + self._ended = true } - - self._buffer.push(entry) - self._process() - - return this._buffer.length === 0 } -DirWriter.prototype._process = function () { - var self = this - - // console.error('DW Process p=%j', self._processing, self.basename) - - if (self._processing) return - - var entry = self._buffer.shift() - if (!entry) { - // console.error("DW Drain") - self.emit('drain') - if (self._ended) self._finish() - return - } - - self._processing = true - // console.error("DW Entry", entry._path) - - self.emit('entry', entry) - // ok, add this entry - // - // don't allow recursive copying - var p = entry - var pp - do { - pp = p._path || p.path - if (pp === self.root._path || pp === self._path || - (pp && pp.indexOf(self._path) === 0)) { - // console.error('DW Exit (recursive)', entry.basename, self._path) - self._processing = false - if (entry._collected) entry.pipe() - return self._process() - } - p = p.parent - } while (p) +/***/ }), - // console.error("DW not recursive") +/***/ 989: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // chop off the entry's root dir, replace with ours - var props = { - parent: self, - root: self.root || self, - type: entry.type, - depth: self.depth + 1 - } +module.exports = Writer - pp = entry._path || entry.path || entry.props.path - if (entry.parent) { - pp = pp.substr(entry.parent._path.length + 1) - } - // get rid of any ../../ shenanigans - props.path = path.join(self.path, path.join('/', pp)) +var fs = __nccwpck_require__(3273) +var inherits = __nccwpck_require__(445) +var rimraf = __nccwpck_require__(5060) +var mkdir = __nccwpck_require__(6994) +var path = __nccwpck_require__(1017) +var umask = process.platform === 'win32' ? 0 : process.umask() +var getType = __nccwpck_require__(9695) +var Abstract = __nccwpck_require__(3652) - // if i have a filter, the child should inherit it. - props.filter = self.filter +// Must do this *before* loading the child classes +inherits(Writer, Abstract) - // all the rest of the stuff, copy over from the source. - Object.keys(entry.props).forEach(function (k) { - if (!props.hasOwnProperty(k)) { - props[k] = entry.props[k] - } - }) +Writer.dirmode = parseInt('0777', 8) & (~umask) +Writer.filemode = parseInt('0666', 8) & (~umask) - // not sure at this point what kind of writer this is. - var child = self._currentChild = new Writer(props) - child.on('ready', function () { - // console.error("DW Child Ready", child.type, child._path) - // console.error(" resuming", entry._path) - entry.pipe(child) - entry.resume() - }) +var DirWriter = __nccwpck_require__(178) +var LinkWriter = __nccwpck_require__(147) +var FileWriter = __nccwpck_require__(3009) +var ProxyWriter = __nccwpck_require__(4943) - // XXX Make this work in node. - // Long filenames should not break stuff. 
- child.on('error', function (er) { - if (child._swallowErrors) { - self.warn(er) - child.emit('end') - child.emit('close') - } else { - self.emit('error', er) - } - }) +// props is the desired state. current is optionally the current stat, +// provided here so that subclasses can avoid statting the target +// more than necessary. +function Writer (props, current) { + var self = this - // we fire _end internally *after* end, so that we don't move on - // until any "end" listeners have had their chance to do stuff. - child.on('close', onend) - var ended = false - function onend () { - if (ended) return - ended = true - // console.error("* DW Child end", child.basename) - self._currentChild = null - self._processing = false - self._process() + if (typeof props === 'string') { + props = { path: props } } -} - -/***/ }), + // polymorphism. + // call fstream.Writer(dir) to get a DirWriter object, etc. + var type = getType(props) + var ClassType = Writer -/***/ 8413: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + switch (type) { + case 'Directory': + ClassType = DirWriter + break + case 'File': + ClassType = FileWriter + break + case 'Link': + case 'SymbolicLink': + ClassType = LinkWriter + break + case null: + default: + // Don't know yet what type to create, so we wrap in a proxy. + ClassType = ProxyWriter + break + } -// Basically just a wrapper around an fs.ReadStream + if (!(self instanceof ClassType)) return new ClassType(props) -module.exports = FileReader + // now get down to business. -var fs = __nccwpck_require__(7758) -var inherits = __nccwpck_require__(4124) -var Reader = __nccwpck_require__(3284) -var EOF = {EOF: true} -var CLOSE = {CLOSE: true} + Abstract.call(self) -inherits(FileReader, Reader) + if (!props.path) self.error('Must provide a path', null, true) -function FileReader (props) { - // console.error(" FR create", props.path, props.size, new Error().stack) - var self = this - if (!(self instanceof FileReader)) { - throw new Error('FileReader must be called as constructor.') - } + // props is what we want to set. + // set some convenience properties as well. + self.type = props.type + self.props = props + self.depth = props.depth || 0 + self.clobber = props.clobber === false ? props.clobber : true + self.parent = props.parent || null + self.root = props.root || (props.parent && props.parent.root) || self - // should already be established as a File type - // XXX Todo: preserve hardlinks by tracking dev+inode+nlink, - // with a HardLinkReader class. 
- if (!((props.type === 'Link' && props.Link) || - (props.type === 'File' && props.File))) { - throw new Error('Non-file type ' + props.type) + self._path = self.path = path.resolve(props.path) + if (process.platform === 'win32') { + self.path = self._path = self.path.replace(/\?/g, '_') + if (self._path.length >= 260) { + self._swallowErrors = true + self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') + } } + self.basename = path.basename(props.path) + self.dirname = path.dirname(props.path) + self.linkpath = props.linkpath || null - self._buffer = [] - self._bytesEmitted = 0 - Reader.call(self, props) -} + props.parent = props.root = null -FileReader.prototype._getStream = function () { - var self = this - var stream = self._stream = fs.createReadStream(self._path, self.props) + // console.error("\n\n\n%s setting size to", props.path, props.size) + self.size = props.size - if (self.props.blksize) { - stream.bufferSize = self.props.blksize + if (typeof props.mode === 'string') { + props.mode = parseInt(props.mode, 8) } - stream.on('open', self.emit.bind(self, 'open')) + self.readable = false + self.writable = true - stream.on('data', function (c) { - // console.error('\t\t%d %s', c.length, self.basename) - self._bytesEmitted += c.length - // no point saving empty chunks - if (!c.length) { - return - } else if (self._paused || self._buffer.length) { - self._buffer.push(c) - self._read() - } else self.emit('data', c) - }) + // buffer until ready, or while handling another entry + self._buffer = [] + self.ready = false - stream.on('end', function () { - if (self._paused || self._buffer.length) { - // console.error('FR Buffering End', self._path) - self._buffer.push(EOF) - self._read() - } else { - self.emit('end') - } + self.filter = typeof props.filter === 'function' ? props.filter : null - if (self._bytesEmitted !== self.props.size) { - self.error("Didn't get expected byte count\n" + - 'expect: ' + self.props.size + '\n' + - 'actual: ' + self._bytesEmitted) - } - }) + // start the ball rolling. + // this checks what's there already, and then calls + // self._create() to call the impl-specific creation stuff. + self._stat(current) +} - stream.on('close', function () { - if (self._paused || self._buffer.length) { - // console.error('FR Buffering Close', self._path) - self._buffer.push(CLOSE) - self._read() - } else { - // console.error('FR close 1', self._path) - self.emit('close') +// Calling this means that it's something we can't create. +// Just assert that it's already there, otherwise raise a warning. +Writer.prototype._create = function () { + var self = this + fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) { + if (er) { + return self.warn('Cannot create ' + self._path + '\n' + + 'Unsupported type: ' + self.type, 'ENOTSUP') } + self._finish() }) - - stream.on('error', function (e) { - self.emit('error', e) - }) - - self._read() } -FileReader.prototype._read = function () { +Writer.prototype._stat = function (current) { var self = this - // console.error('FR _read', self._path) - if (self._paused) { - // console.error('FR _read paused', self._path) - return - } + var props = self.props + var stat = props.follow ? 'stat' : 'lstat' + var who = self._proxy || self - if (!self._stream) { - // console.error('FR _getStream calling', self._path) - return self._getStream() - } + if (current) statCb(null, current) + else fs[stat](self._path, statCb) - // clear out the buffer, if there is one. 
- if (self._buffer.length) { - // console.error('FR _read has buffer', self._buffer.length, self._path) - var buf = self._buffer - for (var i = 0, l = buf.length; i < l; i++) { - var c = buf[i] - if (c === EOF) { - // console.error('FR Read emitting buffered end', self._path) - self.emit('end') - } else if (c === CLOSE) { - // console.error('FR Read emitting buffered close', self._path) - self.emit('close') - } else { - // console.error('FR Read emitting buffered data', self._path) - self.emit('data', c) - } + function statCb (er, current) { + if (self.filter && !self.filter.call(who, who, current)) { + self._aborted = true + self.emit('end') + self.emit('close') + return + } - if (self._paused) { - // console.error('FR Read Re-pausing at '+i, self._path) - self._buffer = buf.slice(i) - return - } + // if it's not there, great. We'll just create it. + // if it is there, then we'll need to change whatever differs + if (er || !current) { + return create(self) } - self._buffer.length = 0 + + self._old = current + var currentType = getType(current) + + // if it's a type change, then we need to clobber or error. + // if it's not a type change, then let the impl take care of it. + if (currentType !== self.type || self.type === 'File' && current.nlink > 1) { + return rimraf(self._path, function (er) { + if (er) return self.error(er) + self._old = null + create(self) + }) + } + + // otherwise, just handle in the app-specific way + // this creates a fs.WriteStream, or mkdir's, or whatever + create(self) } -// console.error("FR _read done") -// that's about all there is to it. } -FileReader.prototype.pause = function (who) { - var self = this - // console.error('FR Pause', self._path) - if (self._paused) return - who = who || self - self._paused = true - if (self._stream) self._stream.pause() - self.emit('pause', who) -} +function create (self) { + // console.error("W create", self._path, Writer.dirmode) -FileReader.prototype.resume = function (who) { - var self = this - // console.error('FR Resume', self._path) - if (!self._paused) return - who = who || self - self.emit('resume', who) - self._paused = false - if (self._stream) self._stream.resume() - self._read() -} + // XXX Need to clobber non-dirs that are in the way, + // unless { clobber: false } in the props. + mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) { + // console.error("W created", path.dirname(self._path), er) + if (er) return self.error(er) + // later on, we have to set the mode and owner for these + self._madeDir = made + return self._create() + }) +} -/***/ }), +function endChmod (self, want, current, path, cb) { + var wantMode = want.mode + var chmod = want.follow || self.type !== 'SymbolicLink' + ? 'chmod' : 'lchmod' -/***/ 2539: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!fs[chmod]) return cb() + if (typeof wantMode !== 'number') return cb() -module.exports = FileWriter + var curMode = current.mode & parseInt('0777', 8) + wantMode = wantMode & parseInt('0777', 8) + if (wantMode === curMode) return cb() -var fs = __nccwpck_require__(7758) -var Writer = __nccwpck_require__(8680) -var inherits = __nccwpck_require__(4124) -var EOF = {} + fs[chmod](path, wantMode, cb) +} -inherits(FileWriter, Writer) +function endChown (self, want, current, path, cb) { + // Don't even try it unless root. Too easy to EPERM. 
+ if (process.platform === 'win32') return cb() + if (!process.getuid || process.getuid() !== 0) return cb() + if (typeof want.uid !== 'number' && + typeof want.gid !== 'number') return cb() -function FileWriter (props) { - var self = this - if (!(self instanceof FileWriter)) { - throw new Error('FileWriter must be called as constructor.') - } + if (current.uid === want.uid && + current.gid === want.gid) return cb() - // should already be established as a File type - if (props.type !== 'File' || !props.File) { - throw new Error('Non-file type ' + props.type) - } + var chown = (self.props.follow || self.type !== 'SymbolicLink') + ? 'chown' : 'lchown' + if (!fs[chown]) return cb() - self._buffer = [] - self._bytesWritten = 0 + if (typeof want.uid !== 'number') want.uid = current.uid + if (typeof want.gid !== 'number') want.gid = current.gid - Writer.call(this, props) + fs[chown](path, want.uid, want.gid, cb) } -FileWriter.prototype._create = function () { - var self = this - if (self._stream) return +function endUtimes (self, want, current, path, cb) { + if (!fs.utimes || process.platform === 'win32') return cb() - var so = {} - if (self.props.flags) so.flags = self.props.flags - so.mode = Writer.filemode - if (self._old && self._old.blksize) so.bufferSize = self._old.blksize + var utimes = (want.follow || self.type !== 'SymbolicLink') + ? 'utimes' : 'lutimes' - self._stream = fs.createWriteStream(self._path, so) + if (utimes === 'lutimes' && !fs[utimes]) { + utimes = 'utimes' + } - self._stream.on('open', function () { - // console.error("FW open", self._buffer, self._path) - self.ready = true - self._buffer.forEach(function (c) { - if (c === EOF) self._stream.end() - else self._stream.write(c) - }) - self.emit('ready') - // give this a kick just in case it needs it. - self.emit('drain') - }) + if (!fs[utimes]) return cb() - self._stream.on('error', function (er) { self.emit('error', er) }) + var curA = current.atime + var curM = current.mtime + var meA = want.atime + var meM = want.mtime - self._stream.on('drain', function () { self.emit('drain') }) + if (meA === undefined) meA = curA + if (meM === undefined) meM = curM - self._stream.on('close', function () { - // console.error('\n\nFW Stream Close', self._path, self.size) - self._finish() - }) + if (!isDate(meA)) meA = new Date(meA) + if (!isDate(meM)) meA = new Date(meM) + + if (meA.getTime() === curA.getTime() && + meM.getTime() === curM.getTime()) return cb() + + fs[utimes](path, meA, meM, cb) } -FileWriter.prototype.write = function (c) { +// XXX This function is beastly. Break it up! +Writer.prototype._finish = function () { var self = this - self._bytesWritten += c.length + if (self._finishing) return + self._finishing = true - if (!self.ready) { - if (!Buffer.isBuffer(c) && typeof c !== 'string') { - throw new Error('invalid write data') - } - self._buffer.push(c) - return false - } + // console.error(" W Finish", self._path, self.size) - var ret = self._stream.write(c) - // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length) + // set up all the things. + // At this point, we're already done writing whatever we've gotta write, + // adding files to the dir, etc. + var todo = 0 + var errState = null + var done = false - // allow 2 buffered writes, because otherwise there's just too - // much stop and go bs. - if (ret === false && self._stream._queue) { - return self._stream._queue.length <= 2 + if (self._old) { + // the times will almost *certainly* have changed. 
+ // adds the utimes syscall, but remove another stat. + self._old.atime = new Date(0) + self._old.mtime = new Date(0) + // console.error(" W Finish Stale Stat", self._path, self.size) + setProps(self._old) } else { - return ret + var stat = self.props.follow ? 'stat' : 'lstat' + // console.error(" W Finish Stating", self._path, self.size) + fs[stat](self._path, function (er, current) { + // console.error(" W Finish Stated", self._path, self.size, current) + if (er) { + // if we're in the process of writing out a + // directory, it's very possible that the thing we're linking to + // doesn't exist yet (especially if it was intended as a symlink), + // so swallow ENOENT errors here and just soldier on. + if (er.code === 'ENOENT' && + (self.type === 'Link' || self.type === 'SymbolicLink') && + process.platform === 'win32') { + self.ready = true + self.emit('ready') + self.emit('end') + self.emit('close') + self.end = self._finish = function () {} + return + } else return self.error(er) + } + setProps(self._old = current) + }) } -} - -FileWriter.prototype.end = function (c) { - var self = this - if (c) self.write(c) + return - if (!self.ready) { - self._buffer.push(EOF) - return false + function setProps (current) { + todo += 3 + endChmod(self, self.props, current, self._path, next('chmod')) + endChown(self, self.props, current, self._path, next('chown')) + endUtimes(self, self.props, current, self._path, next('utimes')) } - return self._stream.end() -} + function next (what) { + return function (er) { + // console.error(" W Finish", what, todo) + if (errState) return + if (er) { + er.fstream_finish_call = what + return self.error(errState = er) + } + if (--todo > 0) return + if (done) return + done = true -FileWriter.prototype._finish = function () { - var self = this - if (typeof self.size === 'number' && self._bytesWritten !== self.size) { - self.error( - 'Did not get expected byte count.\n' + - 'expect: ' + self.size + '\n' + - 'actual: ' + self._bytesWritten) + // we may still need to set the mode/etc. on some parent dirs + // that were created previously. delay end/close until then. + if (!self._madeDir) return end() + else endMadeDir(self, self._path, end) + + function end (er) { + if (er) { + er.fstream_finish_call = 'setupMadeDir' + return self.error(er) + } + // all the props have been set, so we're completely done. + self.emit('end') + self.emit('close') + } + } } - Writer.prototype._finish.call(self) } +function endMadeDir (self, p, cb) { + var made = self._madeDir + // everything *between* made and path.dirname(self._path) + // needs to be set up. Note that this may just be one dir. + var d = path.dirname(p) -/***/ }), + endMadeDir_(self, d, function (er) { + if (er) return cb(er) + if (d === made) { + return cb() + } + endMadeDir(self, d, cb) + }) +} -/***/ 1600: -/***/ ((module) => { +function endMadeDir_ (self, p, cb) { + var dirProps = {} + Object.keys(self.props).forEach(function (k) { + dirProps[k] = self.props[k] -module.exports = getType + // only make non-readable dirs if explicitly requested. 
+ if (k === 'mode' && self.type !== 'Directory') { + dirProps[k] = dirProps[k] | parseInt('0111', 8) + } + }) -function getType (st) { - var types = [ - 'Directory', - 'File', - 'SymbolicLink', - 'Link', // special for hardlinks from tarballs - 'BlockDevice', - 'CharacterDevice', - 'FIFO', - 'Socket' - ] - var type + var todo = 3 + var errState = null + fs.stat(p, function (er, current) { + if (er) return cb(errState = er) + endChmod(self, dirProps, current, p, next) + endChown(self, dirProps, current, p, next) + endUtimes(self, dirProps, current, p, next) + }) - if (st.type && types.indexOf(st.type) !== -1) { - st[st.type] = true - return st.type + function next (er) { + if (errState) return + if (er) return cb(errState = er) + if (--todo === 0) return cb() } +} - for (var i = 0, l = types.length; i < l; i++) { - type = types[i] - var is = st[type] || st['is' + type] - if (typeof is === 'function') is = is.call(st) - if (is) { - st[type] = true - st.type = type - return type - } - } +Writer.prototype.pipe = function () { + this.error("Can't pipe from writable stream") +} - return null +Writer.prototype.add = function () { + this.error("Can't add to non-Directory type") } +Writer.prototype.write = function () { + return true +} -/***/ }), +function objectToString (d) { + return Object.prototype.toString.call(d) +} -/***/ 8337: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function isDate (d) { + return typeof d === 'object' && objectToString(d) === '[object Date]' +} -// Basically just a wrapper around an fs.readlink -// -// XXX: Enhance this to support the Link type, by keeping -// a lookup table of {:}, so that hardlinks -// can be preserved in tarballs. -module.exports = LinkReader +/***/ }), -var fs = __nccwpck_require__(7758) -var inherits = __nccwpck_require__(4124) -var Reader = __nccwpck_require__(3284) +/***/ 2054: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -inherits(LinkReader, Reader) +var concatMap = __nccwpck_require__(6915); +var balanced = __nccwpck_require__(2634); -function LinkReader (props) { - var self = this - if (!(self instanceof LinkReader)) { - throw new Error('LinkReader must be called as constructor.') - } +module.exports = expandTop; - if (!((props.type === 'Link' && props.Link) || - (props.type === 'SymbolicLink' && props.SymbolicLink))) { - throw new Error('Non-link type ' + props.type) - } +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; - Reader.call(self, props) +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); } -// When piping a LinkReader into a LinkWriter, we have to -// already have the linkpath property set, so that has to -// happen *before* the "ready" event, which means we need to -// override the _stat method. 
-LinkReader.prototype._stat = function (currentStat) { - var self = this - fs.readlink(self._path, function (er, linkpath) { - if (er) return self.error(er) - self.linkpath = self.props.linkpath = linkpath - self.emit('linkpath', linkpath) - Reader.prototype._stat.call(self, currentStat) - }) +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); } -LinkReader.prototype._read = function () { - var self = this - if (self._paused) return - // basically just a no-op, since we got all the info we need - // from the _stat method - if (!self._ended) { - self.emit('end') - self.emit('close') - self._ended = true - } +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); } -/***/ }), - -/***/ 404: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = LinkWriter +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; -var fs = __nccwpck_require__(7758) -var Writer = __nccwpck_require__(8680) -var inherits = __nccwpck_require__(4124) -var path = __nccwpck_require__(1017) -var rimraf = __nccwpck_require__(4959) + var parts = []; + var m = balanced('{', '}', str); -inherits(LinkWriter, Writer) + if (!m) + return str.split(','); -function LinkWriter (props) { - var self = this - if (!(self instanceof LinkWriter)) { - throw new Error('LinkWriter must be called as constructor.') - } + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); - // should already be established as a Link type - if (!((props.type === 'Link' && props.Link) || - (props.type === 'SymbolicLink' && props.SymbolicLink))) { - throw new Error('Non-link type ' + props.type) + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); } - if (props.linkpath === '') props.linkpath = '.' - if (!props.linkpath) { - self.error('Need linkpath property to create ' + props.type) - } + parts.push.apply(parts, p); - Writer.call(this, props) + return parts; } -LinkWriter.prototype._create = function () { - // console.error(" LW _create") - var self = this - var hard = self.type === 'Link' || process.platform === 'win32' - var link = hard ? 'link' : 'symlink' - var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath +function expandTop(str) { + if (!str) + return []; - // can only change the link path by clobbering - // For hard links, let's just assume that's always the case, since - // there's no good way to read them if we don't already know. - if (hard) return clobber(self, lp, link) + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } - fs.readlink(self._path, function (er, p) { - // only skip creation if it's exactly the same link - if (p && p === lp) return finish(self) - clobber(self, lp, link) - }) + return expand(escapeBraces(str), true).map(unescapeBraces); } -function clobber (self, lp, link) { - rimraf(self._path, function (er) { - if (er) return self.error(er) - create(self, lp, link) - }) +function identity(e) { + return e; } -function create (self, lp, link) { - fs[link](lp, self._path, function (er) { - // if this is a hard link, and we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier in. - // Additionally, an EPERM or EACCES can happen on win32 if it's trying - // to make a link to a directory. Again, just skip it. - // A better solution would be to have fs.symlink be supported on - // windows in some nice fashion. - if (er) { - if ((er.code === 'ENOENT' || - er.code === 'EACCES' || - er.code === 'EPERM') && process.platform === 'win32') { - self.ready = true - self.emit('ready') - self.emit('end') - self.emit('close') - self.end = self._finish = function () {} - } else return self.error(er) - } - finish(self) - }) +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); } -function finish (self) { - self.ready = true - self.emit('ready') - if (self._ended && !self._finished) self._finish() +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; } -LinkWriter.prototype.end = function () { - // console.error("LW finish in end") - this._ended = true - if (this.ready) { - this._finished = true - this._finish() +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; } -} + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } -/***/ }), + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. -/***/ 7328: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; -// A reader for when we don't yet know what kind of thing -// the thing is. + var N; -module.exports = ProxyReader + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); -var Reader = __nccwpck_require__(3284) -var getType = __nccwpck_require__(1600) -var inherits = __nccwpck_require__(4124) -var fs = __nccwpck_require__(7758) + N = []; -inherits(ProxyReader, Reader) + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } -function ProxyReader (props) { - var self = this - if (!(self instanceof ProxyReader)) { - throw new Error('ProxyReader must be called as constructor.') + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } } - self.props = props - self._buffer = [] - self.ready = false + return expansions; +} - Reader.call(self, props) + + +/***/ }), + +/***/ 466: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) } -ProxyReader.prototype._stat = function () { - var self = this - var props = self.props - // stat the thing to see what the proxy should be. - var stat = props.follow ? 'stat' : 'lstat' +var fs = __nccwpck_require__(7147) +var path = __nccwpck_require__(1017) +var minimatch = __nccwpck_require__(6942) +var isAbsolute = __nccwpck_require__(5165) +var Minimatch = minimatch.Minimatch - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = 'File' - } else { - type = getType(current) - } +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} - props[type] = true - props.type = self.type = type +function setupIgnores (self, options) { + self.ignore = options.ignore || [] - self._old = current - self._addProxy(Reader(props, current)) - }) + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } } -ProxyReader.prototype._addProxy = function (proxy) { - var self = this - if (self._proxyTarget) { - return self.error('proxy already set') +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) } - self._proxyTarget = proxy - proxy._proxy = self + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} - ;[ - 'error', - 'data', - 'end', - 'close', - 'linkpath', - 'entry', - 'entryEnd', - 'child', - 'childEnd', - 'warn', - 'stat' - ].forEach(function (ev) { - // console.error('~~ proxy event', ev, self.path) - proxy.on(ev, self.emit.bind(self, ev)) - }) +function setopts (self, pattern, options) { + if (!options) + options = {} - self.emit('proxy', proxy) + // base-matching: just use globstar for that. 
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } - proxy.on('ready', function () { - // console.error("~~ proxy is ready!", self.path) - self.ready = true - self.emit('ready') - }) + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs - var calls = self._buffer - self._buffer.length = 0 - calls.forEach(function (c) { - proxy[c[0]].apply(proxy, c[1]) - }) -} + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) -ProxyReader.prototype.pause = function () { - return this._proxyTarget ? this._proxyTarget.pause() : false -} + setupIgnores(self, options) -ProxyReader.prototype.resume = function () { - return this._proxyTarget ? this._proxyTarget.resume() : false -} + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") -/***/ }), + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount -/***/ 2071: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + // always treat \ in patterns as escapes, not path separators + options.allowWindowsEscape = false -// A writer for when we don't know what kind of thing -// the thing is. That is, it's not explicitly set, -// so we're going to make it whatever the thing already -// is, or "File" -// -// Until then, collect all events. + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} -module.exports = ProxyWriter +function finish (self) { + var nou = self.nounique + var all = nou ? 
[] : Object.create(null) -var Writer = __nccwpck_require__(8680) -var getType = __nccwpck_require__(1600) -var inherits = __nccwpck_require__(4124) -var collect = __nccwpck_require__(3317) -var fs = __nccwpck_require__(7147) + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } -inherits(ProxyWriter, Writer) + if (!nou) + all = Object.keys(all) -function ProxyWriter (props) { - var self = this - if (!(self instanceof ProxyWriter)) { - throw new Error('ProxyWriter must be called as constructor.') + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } } - self.props = props - self._needDrain = false + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) - Writer.call(self, props) + self.found = all } -ProxyWriter.prototype._stat = function () { - var self = this - var props = self.props - // stat the thing to see what the proxy should be. - var stat = props.follow ? 'stat' : 'lstat' +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = 'File' - } else { - type = getType(current) - } + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) - props[type] = true - props.type = self.type = type + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } - self._old = current - self._addProxy(Writer(props, current)) - }) + return m } -ProxyWriter.prototype._addProxy = function (proxy) { - // console.error("~~ set proxy", this.path) - var self = this - if (self._proxy) { - return self.error('proxy already set') +// lotta situps... 
+function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) } - self._proxy = proxy - ;[ - 'ready', - 'error', - 'close', - 'pipe', - 'drain', - 'warn' - ].forEach(function (ev) { - proxy.on(ev, self.emit.bind(self, ev)) - }) - - self.emit('proxy', proxy) + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') - var calls = self._buffer - calls.forEach(function (c) { - // console.error("~~ ~~ proxy buffered call", c[0], c[1]) - proxy[c[0]].apply(proxy, c[1]) - }) - self._buffer.length = 0 - if (self._needsDrain) self.emit('drain') + return abs } -ProxyWriter.prototype.add = function (entry) { - // console.error("~~ proxy add") - collect(entry) - if (!this._proxy) { - this._buffer.push(['add', [entry]]) - this._needDrain = true +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) return false - } - return this._proxy.add(entry) -} -ProxyWriter.prototype.write = function (c) { - // console.error('~~ proxy write') - if (!this._proxy) { - this._buffer.push(['write', [c]]) - this._needDrain = true - return false - } - return this._proxy.write(c) + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) } -ProxyWriter.prototype.end = function (c) { - // console.error('~~ proxy end') - if (!this._proxy) { - this._buffer.push(['end', [c]]) +function childrenIgnored (self, path) { + if (!self.ignore.length) return false - } - return this._proxy.end(c) + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) } /***/ }), -/***/ 3284: +/***/ 4966: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = Reader +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. 
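// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the bundled glob module recorded in this
// diff): the "Caveat" comment above describes the cache convention that the
// module below relies on throughout. Assuming a hypothetical `cache` object
// shaped the way that comment describes, a lookup might look like this; the
// names `cache` and `isKnownDir` are illustrative only, the real module keeps
// this map on `self.cache`, keyed by absolute path.
//
//   var cache = Object.create(null)
//   cache['/tmp/a.txt']   = 'FILE'              // plain file (or `true`)
//   cache['/tmp/dir']     = ['a.txt', 'b.txt']  // directory with its entries
//   cache['/tmp/missing'] = false               // known not to exist
//
//   function isKnownDir (p) {
//     var c = cache[p]
//     return c === 'DIR' || Array.isArray(c)
//   }
// ---------------------------------------------------------------------------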
-var fs = __nccwpck_require__(7758) -var Stream = (__nccwpck_require__(2781).Stream) -var inherits = __nccwpck_require__(4124) -var path = __nccwpck_require__(1017) -var getType = __nccwpck_require__(1600) -var hardLinks = Reader.hardLinks = {} -var Abstract = __nccwpck_require__(9479) +module.exports = glob -// Must do this *before* loading the child classes -inherits(Reader, Abstract) +var rp = __nccwpck_require__(5717) +var minimatch = __nccwpck_require__(6942) +var Minimatch = minimatch.Minimatch +var inherits = __nccwpck_require__(445) +var EE = (__nccwpck_require__(2361).EventEmitter) +var path = __nccwpck_require__(1017) +var assert = __nccwpck_require__(9491) +var isAbsolute = __nccwpck_require__(5165) +var globSync = __nccwpck_require__(7348) +var common = __nccwpck_require__(466) +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = __nccwpck_require__(2681) +var util = __nccwpck_require__(3837) +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored -var LinkReader = __nccwpck_require__(8337) +var once = __nccwpck_require__(4822) -function Reader (props, currentStat) { - var self = this - if (!(self instanceof Reader)) return new Reader(props, currentStat) +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} - if (typeof props === 'string') { - props = { path: props } + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) } - // polymorphism. - // call fstream.Reader(dir) to get a DirReader object, etc. - // Note that, unlike in the Writer case, ProxyReader is going - // to be the *normal* state of affairs, since we rarely know - // the type of a file prior to reading it. + return new Glob(pattern, options, cb) +} - var type - var ClassType +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync - if (props.type && typeof props.type === 'function') { - type = props.type - ClassType = type - } else { - type = getType(props) - ClassType = Reader +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin } - if (currentStat && !type) { - type = getType(currentStat) - props[type] = true - props.type = type + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] } + return origin +} - switch (type) { - case 'Directory': - ClassType = __nccwpck_require__(4486) - break +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true - case 'Link': - // XXX hard links are just files. - // However, it would be good to keep track of files' dev+inode - // and nlink values, and create a HardLinkReader that emits - // a linkpath value of the original copy, so that the tar - // writer can preserve them. 
- // ClassType = HardLinkReader - // break + var g = new Glob(pattern, options) + var set = g.minimatch.set - case 'File': - ClassType = __nccwpck_require__(8413) - break + if (!pattern) + return false - case 'SymbolicLink': - ClassType = LinkReader - break + if (set.length > 1) + return true - case 'Socket': - ClassType = __nccwpck_require__(2470) - break + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } - case null: - ClassType = __nccwpck_require__(7328) - break + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null } - if (!(self instanceof ClassType)) { - return new ClassType(props) + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) } - Abstract.call(self) + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) - if (!props.path) { - self.error('Must provide a path', null, true) + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. + this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) } - self.readable = true - self.writable = false + var self = this + this._processing = 0 - self.type = type - self.props = props - self.depth = props.depth = props.depth || 0 - self.parent = props.parent || null - self.root = props.root || (props.parent && props.parent.root) || self + this._emitQueue = [] + this._processQueue = [] + this.paused = false - self._path = self.path = path.resolve(props.path) - if (process.platform === 'win32') { - self.path = self._path = self.path.replace(/\?/g, '_') - if (self._path.length >= 260) { - // how DOES one create files on the moon? - // if the path has spaces in it, then UNC will fail. - self._swallowErrors = true - // if (self._path.indexOf(" ") === -1) { - self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') - // } + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } } } - self.basename = props.basename = path.basename(self.path) - self.dirname = props.dirname = path.dirname(self.path) +} - // these have served their purpose, and are now just noisy clutter - props.parent = props.root = null +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return - // console.error("\n\n\n%s setting size to", props.path, props.size) - self.size = props.size - self.filter = typeof props.filter === 'function' ? props.filter : null - if (props.sort === 'alpha') props.sort = alphasort + if (this.realpath && !this._didRealpath) + return this._realpath() - // start the ball rolling. - // this will stat the thing, and then call self._read() - // to start reading whatever it is. 
- // console.error("calling stat", props.path, currentStat) - self._stat(currentStat) + common.finish(this) + this.emit('end', this.found) } -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} +Glob.prototype._realpath = function () { + if (this._didRealpath) + return -Reader.prototype._stat = function (currentStat) { - var self = this - var props = self.props - var stat = props.follow ? 'stat' : 'lstat' - // console.error("Reader._stat", self._path, currentStat) - if (currentStat) process.nextTick(statCb.bind(null, null, currentStat)) - else fs[stat](self._path, statCb) + this._didRealpath = true - function statCb (er, props_) { - // console.error("Reader._stat, statCb", self._path, props_, props_.nlink) - if (er) return self.error(er) + var n = this.matches.length + if (n === 0) + return this._finish() - Object.keys(props_).forEach(function (k) { - props[k] = props_[k] - }) + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) - // if it's not the expected size, then abort here. - if (undefined !== self.size && props.size !== self.size) { - return self.error('incorrect size') - } - self.size = props.size + function next () { + if (--n === 0) + self._finish() + } +} - var type = getType(props) - var handleHardlinks = props.hardlinks !== false +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() - // special little thing for handling hardlinks. - if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) { - var k = props.dev + ':' + props.ino - // console.error("Reader has nlink", self._path, k) - if (hardLinks[k] === self._path || !hardLinks[k]) { - hardLinks[k] = self._path - } else { - // switch into hardlink mode. - type = self.type = self.props.type = 'Link' - self.Link = self.props.Link = true - self.linkpath = self.props.linkpath = hardLinks[k] - // console.error("Hardlink detected, switching mode", self._path, self.linkpath) - // Setting __proto__ would arguably be the "correct" - // approach here, but that just seems too wrong. - self._stat = self._read = LinkReader.prototype._read - } - } + var found = Object.keys(matchset) + var self = this + var n = found.length - if (self.type && self.type !== type) { - self.error('Unexpected type: ' + type) - } + if (n === 0) + return cb() - // if the filter doesn't pass, then just skip over this one. - // still have to emit end so that dir-walking can move on. - if (self.filter) { - var who = self._proxy || self - // special handling for ProxyReaders - if (!self.filter.call(who, who, props)) { - if (!self._disowned) { - self.abort() - self.emit('end') - self.emit('close') - } - return - } - } + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here - // last chance to abort or disown before the flow starts! 
- var events = ['_stat', 'stat', 'ready'] - var e = 0 - ;(function go () { - if (self._aborted) { - self.emit('end') - self.emit('close') - return + if (--n === 0) { + self.matches[index] = set + cb() } + }) + }) +} - if (self._paused && self.type !== 'Directory') { - self.once('resume', go) - return - } +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} - var ev = events[e++] - if (!ev) { - return self._read() - } - self.emit(ev, props) - go() - })() +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') } } -Reader.prototype.pipe = function (dest) { - var self = this - if (typeof dest.add === 'function') { - // piping to a multi-compatible, and we've got directory entries. - self.on('entry', function (entry) { - var ret = dest.add(entry) - if (ret === false) { - self.pause() +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) } - }) + } } - - // console.error("R Pipe apply Stream Pipe") - return Stream.prototype.pipe.apply(this, arguments) } -Reader.prototype.pause = function (who) { - this._paused = true - who = who || this - this.emit('pause', who) - if (this._stream) this._stream.pause(who) -} +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') -Reader.prototype.resume = function (who) { - this._paused = false - who = who || this - this.emit('resume', who) - if (this._stream) this._stream.resume(who) - this._read() -} + if (this.aborted) + return -Reader.prototype._read = function () { - this.error('Cannot read unknown type: ' + this.type) -} + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + //console.error('PROCESS %d', this._processing, pattern) -/***/ }), + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. -/***/ 2470: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return -// Just get the stats, and then don't do anything. -// You can't really "read" from a socket. You "connect" to it. -// Mostly, this is here so that reading a dir with a socket in it -// doesn't blow up. + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break -module.exports = SocketReader + default: + // pattern has some string bits in the front. 
+ // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } -var inherits = __nccwpck_require__(4124) -var Reader = __nccwpck_require__(3284) + var remain = pattern.slice(n) -inherits(SocketReader, Reader) + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix -function SocketReader (props) { - var self = this - if (!(self instanceof SocketReader)) { - throw new Error('SocketReader must be called as constructor.') - } + var abs = this._makeAbs(read) - if (!(props.type === 'Socket' && props.Socket)) { - throw new Error('Non-socket type ' + props.type) - } + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() - Reader.call(self, props) + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) } -SocketReader.prototype._read = function () { +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { var self = this - if (self._paused) return - // basically just a no-op, since we got all the info we have - // from the _stat method - if (!self._ended) { - self.emit('end') - self.emit('close') - self._ended = true - } + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) } +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { -/***/ }), + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() -/***/ 8680: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' -module.exports = Writer + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } -var fs = __nccwpck_require__(7758) -var inherits = __nccwpck_require__(4124) -var rimraf = __nccwpck_require__(4959) -var mkdir = __nccwpck_require__(7812) -var path = __nccwpck_require__(1017) -var umask = process.platform === 'win32' ? 0 : process.umask() -var getType = __nccwpck_require__(1600) -var Abstract = __nccwpck_require__(9479) + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) -// Must do this *before* loading the child classes -inherits(Writer, Abstract) + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() -Writer.dirmode = parseInt('0777', 8) & (~umask) -Writer.filemode = parseInt('0666', 8) & (~umask) + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. 
-var DirWriter = __nccwpck_require__(4745) -var LinkWriter = __nccwpck_require__(404) -var FileWriter = __nccwpck_require__(2539) -var ProxyWriter = __nccwpck_require__(2071) + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) -// props is the desired state. current is optionally the current stat, -// provided here so that subclasses can avoid statting the target -// more than necessary. -function Writer (props, current) { - var self = this + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } - if (typeof props === 'string') { - props = { path: props } + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() } - // polymorphism. - // call fstream.Writer(dir) to get a DirWriter object, etc. - var type = getType(props) - var ClassType = Writer - - switch (type) { - case 'Directory': - ClassType = DirWriter - break - case 'File': - ClassType = FileWriter - break - case 'Link': - case 'SymbolicLink': - ClassType = LinkWriter - break - case null: - default: - // Don't know yet what type to create, so we wrap in a proxy. - ClassType = ProxyWriter - break + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) } + cb() +} - if (!(self instanceof ClassType)) return new ClassType(props) - - // now get down to business. +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return - Abstract.call(self) + if (isIgnored(this, e)) + return - if (!props.path) self.error('Must provide a path', null, true) + if (this.paused) { + this._emitQueue.push([index, e]) + return + } - // props is what we want to set. - // set some convenience properties as well. - self.type = props.type - self.props = props - self.depth = props.depth || 0 - self.clobber = props.clobber === false ? props.clobber : true - self.parent = props.parent || null - self.root = props.root || (props.parent && props.parent.root) || self + var abs = isAbsolute(e) ? e : this._makeAbs(e) - self._path = self.path = path.resolve(props.path) - if (process.platform === 'win32') { - self.path = self._path = self.path.replace(/\?/g, '_') - if (self._path.length >= 260) { - self._swallowErrors = true - self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') - } - } - self.basename = path.basename(props.path) - self.dirname = path.dirname(props.path) - self.linkpath = props.linkpath || null + if (this.mark) + e = this._mark(e) - props.parent = props.root = null + if (this.absolute) + e = abs - // console.error("\n\n\n%s setting size to", props.path, props.size) - self.size = props.size + if (this.matches[index][e]) + return - if (typeof props.mode === 'string') { - props.mode = parseInt(props.mode, 8) + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return } - self.readable = false - self.writable = true - - // buffer until ready, or while handling another entry - self._buffer = [] - self.ready = false + this.matches[index][e] = true - self.filter = typeof props.filter === 'function' ? 
props.filter : null + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) - // start the ball rolling. - // this checks what's there already, and then calls - // self._create() to call the impl-specific creation stuff. - self._stat(current) + this.emit('match', e) } -// Calling this means that it's something we can't create. -// Just assert that it's already there, otherwise raise a warning. -Writer.prototype._create = function () { +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs var self = this - fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) { - if (er) { - return self.warn('Cannot create ' + self._path + '\n' + - 'Unsupported type: ' + self.type, 'ENOTSUP') - } - self._finish() - }) + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } } -Writer.prototype._stat = function (current) { - var self = this - var props = self.props - var stat = props.follow ? 'stat' : 'lstat' - var who = self._proxy || self +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return - if (current) statCb(null, current) - else fs[stat](self._path, statCb) + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return - function statCb (er, current) { - if (self.filter && !self.filter.call(who, who, current)) { - self._aborted = true - self.emit('end') - self.emit('close') - return - } + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) - // if it's not there, great. We'll just create it. - // if it is there, then we'll need to change whatever differs - if (er || !current) { - return create(self) - } + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() - self._old = current - var currentType = getType(current) + if (Array.isArray(c)) + return cb(null, c) + } - // if it's a type change, then we need to clobber or error. - // if it's not a type change, then let the impl take care of it. - if (currentType !== self.type || self.type === 'File' && current.nlink > 1) { - return rimraf(self._path, function (er) { - if (er) return self.error(er) - self._old = null - create(self) - }) - } + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} - // otherwise, just handle in the app-specific way - // this creates a fs.WriteStream, or mkdir's, or whatever - create(self) +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) } } -function create (self) { - // console.error("W create", self._path, Writer.dirmode) +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return - // XXX Need to clobber non-dirs that are in the way, - // unless { clobber: false } in the props. 
- mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) { - // console.error("W created", path.dirname(self._path), er) - if (er) return self.error(er) + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } - // later on, we have to set the mode and owner for these - self._madeDir = made - return self._create() - }) + this.cache[abs] = entries + return cb(null, entries) } -function endChmod (self, want, current, path, cb) { - var wantMode = want.mode - var chmod = want.follow || self.type !== 'SymbolicLink' - ? 'chmod' : 'lchmod' +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return - if (!fs[chmod]) return cb() - if (typeof wantMode !== 'number') return cb() + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break - var curMode = current.mode & parseInt('0777', 8) - wantMode = wantMode & parseInt('0777', 8) - if (wantMode === curMode) return cb() + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break - fs[chmod](path, wantMode, cb) + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() } -function endChown (self, want, current, path, cb) { - // Don't even try it unless root. Too easy to EPERM. - if (process.platform === 'win32') return cb() - if (!process.getuid || process.getuid() !== 0) return cb() - if (typeof want.uid !== 'number' && - typeof want.gid !== 'number') return cb() +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} - if (current.uid === want.uid && - current.gid === want.gid) return cb() - var chown = (self.props.follow || self.type !== 'SymbolicLink') - ? 'chown' : 'lchown' - if (!fs[chown]) return cb() +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) - if (typeof want.uid !== 'number') want.uid = current.uid - if (typeof want.gid !== 'number') want.gid = current.gid + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() - fs[chown](path, want.uid, want.gid, cb) -} + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? 
[ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) -function endUtimes (self, want, current, path, cb) { - if (!fs.utimes || process.platform === 'win32') return cb() + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) - var utimes = (want.follow || self.type !== 'SymbolicLink') - ? 'utimes' : 'lutimes' + var isSym = this.symlinks[abs] + var len = entries.length - if (utimes === 'lutimes' && !fs[utimes]) { - utimes = 'utimes' + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) } - if (!fs[utimes]) return cb() + cb() +} - var curA = current.atime - var curM = current.mtime - var meA = want.atime - var meM = want.mtime +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { - if (meA === undefined) meA = curA - if (meM === undefined) meM = curM + //console.error('ps2', prefix, exists) - if (!isDate(meA)) meA = new Date(meA) - if (!isDate(meM)) meA = new Date(meM) + if (!this.matches[index]) + this.matches[index] = Object.create(null) - if (meA.getTime() === curA.getTime() && - meM.getTime() === curM.getTime()) return cb() + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() - fs[utimes](path, meA, meM, cb) + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() } -// XXX This function is beastly. Break it up! -Writer.prototype._finish = function () { - var self = this +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' - if (self._finishing) return - self._finishing = true + if (f.length > this.maxLength) + return cb() - // console.error(" W Finish", self._path, self.size) + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] - // set up all the things. - // At this point, we're already done writing whatever we've gotta write, - // adding files to the dir, etc. - var todo = 0 - var errState = null - var done = false + if (Array.isArray(c)) + c = 'DIR' - if (self._old) { - // the times will almost *certainly* have changed. - // adds the utimes syscall, but remove another stat. - self._old.atime = new Date(0) - self._old.mtime = new Date(0) - // console.error(" W Finish Stale Stat", self._path, self.size) - setProps(self._old) - } else { - var stat = self.props.follow ? 
'stat' : 'lstat' - // console.error(" W Finish Stating", self._path, self.size) - fs[stat](self._path, function (er, current) { - // console.error(" W Finish Stated", self._path, self.size, current) - if (er) { - // if we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier on. - if (er.code === 'ENOENT' && - (self.type === 'Link' || self.type === 'SymbolicLink') && - process.platform === 'win32') { - self.ready = true - self.emit('ready') - self.emit('end') - self.emit('close') - self.end = self._finish = function () {} - return - } else return self.error(er) - } - setProps(self._old = current) - }) - } + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) - return + if (needDir && c === 'FILE') + return cb() - function setProps (current) { - todo += 3 - endChmod(self, self.props, current, self._path, next('chmod')) - endChown(self, self.props, current, self._path, next('chown')) - endUtimes(self, self.props, current, self._path, next('utimes')) + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. } - function next (what) { - return function (er) { - // console.error(" W Finish", what, todo) - if (errState) return - if (er) { - er.fstream_finish_call = what - return self.error(errState = er) - } - if (--todo > 0) return - if (done) return - done = true + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } - // we may still need to set the mode/etc. on some parent dirs - // that were created previously. delay end/close until then. - if (!self._madeDir) return end() - else endMadeDir(self, self._path, end) + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) - function end (er) { - if (er) { - er.fstream_finish_call = 'setupMadeDir' - return self.error(er) - } - // all the props have been set, so we're completely done. - self.emit('end') - self.emit('close') - } + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) } } } -function endMadeDir (self, p, cb) { - var made = self._madeDir - // everything *between* made and path.dirname(self._path) - // needs to be set up. Note that this may just be one dir. 
- var d = path.dirname(p) +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } - endMadeDir_(self, d, function (er) { - if (er) return cb(er) - if (d === made) { - return cb() - } - endMadeDir(self, d, cb) - }) -} + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat -function endMadeDir_ (self, p, cb) { - var dirProps = {} - Object.keys(self.props).forEach(function (k) { - dirProps[k] = self.props[k] + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) - // only make non-readable dirs if explicitly requested. - if (k === 'mode' && self.type !== 'Directory') { - dirProps[k] = dirProps[k] | parseInt('0111', 8) - } - }) + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c - var todo = 3 - var errState = null - fs.stat(p, function (er, current) { - if (er) return cb(errState = er) - endChmod(self, dirProps, current, p, next) - endChown(self, dirProps, current, p, next) - endUtimes(self, dirProps, current, p, next) - }) + if (needDir && c === 'FILE') + return cb() - function next (er) { - if (errState) return - if (er) return cb(errState = er) - if (--todo === 0) return cb() - } + return cb(null, c, stat) } -Writer.prototype.pipe = function () { - this.error("Can't pipe from writable stream") -} -Writer.prototype.add = function () { - this.error("Can't add to non-Directory type") -} +/***/ }), -Writer.prototype.write = function () { - return true -} +/***/ 7348: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function objectToString (d) { - return Object.prototype.toString.call(d) -} +module.exports = globSync +globSync.GlobSync = GlobSync -function isDate (d) { - return typeof d === 'object' && objectToString(d) === '[object Date]' +var rp = __nccwpck_require__(5717) +var minimatch = __nccwpck_require__(6942) +var Minimatch = minimatch.Minimatch +var Glob = (__nccwpck_require__(4966).Glob) +var util = __nccwpck_require__(3837) +var path = __nccwpck_require__(1017) +var assert = __nccwpck_require__(9491) +var isAbsolute = __nccwpck_require__(5165) +var common = __nccwpck_require__(466) +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found } +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') -/***/ }), + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') -/***/ 7812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) -var path = __nccwpck_require__(1017); -var fs = __nccwpck_require__(7147); -var _0777 = parseInt('0777', 8); + setopts(this, pattern, options) -module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; + if (this.noprocess) + return this -function mkdirP (p, opts, f, made) { - if (typeof opts === 'function') { - f = opts; - opts = {}; - } - else if (!opts || typeof opts !== 'object') { - opts = { mode: opts }; - } - - var mode = 
opts.mode; - var xfs = opts.fs || fs; - - if (mode === undefined) { - mode = _0777 - } - if (!made) made = null; - - var cb = f || /* istanbul ignore next */ function () {}; - p = path.resolve(p); - - xfs.mkdir(p, mode, function (er) { - if (!er) { - made = made || p; - return cb(null, made); - } - switch (er.code) { - case 'ENOENT': - /* istanbul ignore if */ - if (path.dirname(p) === p) return cb(er); - mkdirP(path.dirname(p), opts, function (er, made) { - /* istanbul ignore if */ - if (er) cb(er, made); - else mkdirP(p, opts, cb, made); - }); - break; + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} - // In the case of any other error, just see if there's a dir - // there already. If so, then hooray! If not, then something - // is borked. - default: - xfs.stat(p, function (er2, stat) { - // if the stat fails, then that's super weird. - // let the original error be the failure reason. - if (er2 || !stat.isDirectory()) cb(er, made) - else cb(null, made); - }); - break; +GlobSync.prototype._finish = function () { + assert.ok(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er } - }); + } + }) + } + common.finish(this) } -mkdirP.sync = function sync (p, opts, made) { - if (!opts || typeof opts !== 'object') { - opts = { mode: opts }; - } - - var mode = opts.mode; - var xfs = opts.fs || fs; - - if (mode === undefined) { - mode = _0777 - } - if (!made) made = null; - p = path.resolve(p); +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert.ok(this instanceof GlobSync) - try { - xfs.mkdirSync(p, mode); - made = made || p; - } - catch (err0) { - switch (err0.code) { - case 'ENOENT' : - made = sync(path.dirname(p), opts, made); - sync(p, opts, made); - break; + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. - // In the case of any other error, just see if there's a dir - // there already. If so, then hooray! If not, then something - // is borked. - default: - var stat; - try { - stat = xfs.statSync(p); - } - catch (err1) /* istanbul ignore next */ { - throw err0; - } - /* istanbul ignore if */ - if (!stat.isDirectory()) throw err0; - break; - } - } + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return - return made; -}; + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } -/***/ }), + var remain = pattern.slice(n) -/***/ 7625: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // get the list of entries. + var read + if (prefix === null) + read = '.' 
+ else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix -exports.setopts = setopts -exports.ownProp = ownProp -exports.makeAbs = makeAbs -exports.finish = finish -exports.mark = mark -exports.isIgnored = isIgnored -exports.childrenIgnored = childrenIgnored + var abs = this._makeAbs(read) -function ownProp (obj, field) { - return Object.prototype.hasOwnProperty.call(obj, field) + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) } -var fs = __nccwpck_require__(7147) -var path = __nccwpck_require__(1017) -var minimatch = __nccwpck_require__(3973) -var isAbsolute = __nccwpck_require__(8714) -var Minimatch = minimatch.Minimatch -function alphasort (a, b) { - return a.localeCompare(b, 'en') -} +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) -function setupIgnores (self, options) { - self.ignore = options.ignore || [] + // if the abs isn't a dir, then nothing can match! + if (!entries) + return - if (!Array.isArray(self.ignore)) - self.ignore = [self.ignore] + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' - if (self.ignore.length) { - self.ignore = self.ignore.map(ignoreMap) + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } } -} -// ignore patterns are always in dot:true mode. -function ignoreMap (pattern) { - var gmatcher = null - if (pattern.slice(-3) === '/**') { - var gpattern = pattern.replace(/(\/\*\*)+$/, '') - gmatcher = new Minimatch(gpattern, { dot: true }) + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return } - return { - matcher: new Minimatch(pattern, { dot: true }), - gmatcher: gmatcher + // now test all matched entries as stand-ins for that part + // of the pattern. 
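The dot-handling added above (a `.`-prefixed entry only matches when the pattern portion itself starts with a dot, or when `dot` is set) is easiest to see through the public API. A minimal sketch, assuming the standalone `glob` package that this bundle vendors and a hypothetical directory containing `.env` and `app.js`:

const glob = require('glob')

glob.sync('*')                //=> ['app.js']          dotfiles are skipped by default
glob.sync('*', { dot: true }) //=> ['.env', 'app.js']  dot:true lets '*' cover a leading dot
glob.sync('.*')               //=> ['.env']            a portion starting with '.' may always match dotfiles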
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) } } -function setopts (self, pattern, options) { - if (!options) - options = {} - - // base-matching: just use globstar for that. - if (options.matchBase && -1 === pattern.indexOf("/")) { - if (options.noglobstar) { - throw new Error("base matching requires globstar") - } - pattern = "**/" + pattern - } - self.silent = !!options.silent - self.pattern = pattern - self.strict = options.strict !== false - self.realpath = !!options.realpath - self.realpathCache = options.realpathCache || Object.create(null) - self.follow = !!options.follow - self.dot = !!options.dot - self.mark = !!options.mark - self.nodir = !!options.nodir - if (self.nodir) - self.mark = true - self.sync = !!options.sync - self.nounique = !!options.nounique - self.nonull = !!options.nonull - self.nosort = !!options.nosort - self.nocase = !!options.nocase - self.stat = !!options.stat - self.noprocess = !!options.noprocess - self.absolute = !!options.absolute - self.fs = options.fs || fs +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return - self.maxLength = options.maxLength || Infinity - self.cache = options.cache || Object.create(null) - self.statCache = options.statCache || Object.create(null) - self.symlinks = options.symlinks || Object.create(null) + var abs = this._makeAbs(e) - setupIgnores(self, options) + if (this.mark) + e = this._mark(e) - self.changedCwd = false - var cwd = process.cwd() - if (!ownProp(options, "cwd")) - self.cwd = cwd - else { - self.cwd = path.resolve(options.cwd) - self.changedCwd = self.cwd !== cwd + if (this.absolute) { + e = abs } - self.root = options.root || path.resolve(self.cwd, "/") - self.root = path.resolve(self.root) - if (process.platform === "win32") - self.root = self.root.replace(/\\/g, "/") + if (this.matches[index][e]) + return - // TODO: is an absolute `cwd` supposed to be resolved against `root`? - // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') - self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) - if (process.platform === "win32") - self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") - self.nomount = !!options.nomount + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } - // disable comments and negation in Minimatch. - // Note that they are not supported in Glob itself anyway. - options.nonegate = true - options.nocomment = true - // always treat \ in patterns as escapes, not path separators - options.allowWindowsEscape = false + this.matches[index][e] = true - self.minimatch = new Minimatch(pattern, options) - self.options = self.minimatch.options + if (this.stat) + this._stat(e) } -function finish (self) { - var nou = self.nounique - var all = nou ? 
[] : Object.create(null) - - for (var i = 0, l = self.matches.length; i < l; i ++) { - var matches = self.matches[i] - if (!matches || Object.keys(matches).length === 0) { - if (self.nonull) { - // do like the shell, and spit out the literal glob - var literal = self.minimatch.globSet[i] - if (nou) - all.push(literal) - else - all[literal] = true - } - } else { - // had matches - var m = Object.keys(matches) - if (nou) - all.push.apply(all, m) - else - m.forEach(function (m) { - all[m] = true - }) - } - } - - if (!nou) - all = Object.keys(all) - if (!self.nosort) - all = all.sort(alphasort) +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) - // at *some* point we statted all of these - if (self.mark) { - for (var i = 0; i < all.length; i++) { - all[i] = self._mark(all[i]) - } - if (self.nodir) { - all = all.filter(function (e) { - var notDir = !(/\/$/.test(e)) - var c = self.cache[e] || self.cache[makeAbs(self, e)] - if (notDir && c) - notDir = c !== 'DIR' && !Array.isArray(c) - return notDir - }) + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null } } - if (self.ignore.length) - all = all.filter(function(m) { - return !isIgnored(self, m) - }) + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym - self.found = all + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries } -function mark (self, p) { - var abs = makeAbs(self, p) - var c = self.cache[abs] - var m = p - if (c) { - var isDir = c === 'DIR' || Array.isArray(c) - var slash = p.slice(-1) === '/' +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries - if (isDir && !slash) - m += '/' - else if (!isDir && slash) - m = m.slice(0, -1) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) - if (m !== p) { - var mabs = makeAbs(self, m) - self.statCache[mabs] = self.statCache[abs] - self.cache[mabs] = self.cache[abs] - } + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c } - return m + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } } -// lotta situps... -function makeAbs (self, f) { - var abs = f - if (f.charAt(0) === '/') { - abs = path.join(self.root, f) - } else if (isAbsolute(f) || f === '') { - abs = f - } else if (self.changedCwd) { - abs = path.resolve(self.cwd, f) - } else { - abs = path.resolve(f) +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. 
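`_readdirInGlobStar` above is what keeps `**` from looping through symlinked directories: it lstats the entry, records it in `symlinks[abs]`, and only the `follow` option lets globstar keep descending through links. A rough sketch of the observable behaviour, assuming the standalone `glob` package and a hypothetical symlink somewhere in the tree:

const glob = require('glob')

// '**' notes symlinked directories but does not recurse through them by default,
// which is the cycle protection implemented above.
glob.sync('**/*.js')

// follow:true walks through symlinked directories as well; with cyclic links this
// can produce duplicate (or unbounded) results, so it is opt-in.
glob.sync('**/*.js', { follow: true })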
+ if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } } - if (process.platform === 'win32') - abs = abs.replace(/\\/g, '/') + this.cache[abs] = entries - return abs + // mark and cache dir-ness + return entries } +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break -// Return true, if pattern ends with globstar '**', for the accompanying parent directory. -// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents -function isIgnored (self, path) { - if (!self.ignore.length) - return false + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break - return self.ignore.some(function(item) { - return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) - }) + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } } -function childrenIgnored (self, path) { - if (!self.ignore.length) - return false - - return self.ignore.some(function(item) { - return !!(item.gmatcher && item.gmatcher.match(path)) - }) -} +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) -/***/ }), + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return -/***/ 1957: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) -// Approach: -// -// 1. Get the minimatch set -// 2. For each pattern in the set, PROCESS(pattern, false) -// 3. Store matches per-set, then uniq them -// -// PROCESS(pattern, inGlobStar) -// Get the first [n] items from pattern that are all strings -// Join these together. This is PREFIX. -// If there is no more remaining, then stat(PREFIX) and -// add to matches if it succeeds. END. -// -// If inGlobStar and PREFIX is symlink and points to dir -// set ENTRIES = [] -// else readdir(PREFIX) as ENTRIES -// If fail, END -// -// with ENTRIES -// If pattern[n] is GLOBSTAR -// // handle the case where the globstar match is empty -// // by pruning it out, and testing the resulting pattern -// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) -// // handle other cases. -// for ENTRY in ENTRIES (not dotfiles) -// // attach globstar + tail onto the entry -// // Mark that this entry is a globstar match -// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) -// -// else // not globstar -// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) -// Test ENTRY against pattern[n] -// If fails, continue -// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. 
$]) -// -// Caveat: -// Cache all stats and readdirs results to minimize syscall. Since all -// we ever care about is existence and directory-ness, we can just keep -// `true` for files, and [children,...] for directories, or `false` for -// things that don't exist. + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) -module.exports = glob + var len = entries.length + var isSym = this.symlinks[abs] -var rp = __nccwpck_require__(6863) -var minimatch = __nccwpck_require__(3973) -var Minimatch = minimatch.Minimatch -var inherits = __nccwpck_require__(4124) -var EE = (__nccwpck_require__(2361).EventEmitter) -var path = __nccwpck_require__(1017) -var assert = __nccwpck_require__(9491) -var isAbsolute = __nccwpck_require__(8714) -var globSync = __nccwpck_require__(9010) -var common = __nccwpck_require__(7625) -var setopts = common.setopts -var ownProp = common.ownProp -var inflight = __nccwpck_require__(2492) -var util = __nccwpck_require__(3837) -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return -var once = __nccwpck_require__(1223) + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue -function glob (pattern, options, cb) { - if (typeof options === 'function') cb = options, options = {} - if (!options) options = {} + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) - if (options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return globSync(pattern, options) + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) } - - return new Glob(pattern, options, cb) } -glob.sync = globSync -var GlobSync = glob.GlobSync = globSync.GlobSync +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
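The "Caveat" comment above describes the cache shape that both the old and the new glob code rely on; the `_readdirError`/`_processGlobStar` additions in this hunk follow the same convention. A purely illustrative sketch of those shapes (paths are made up):

// this.cache[abs]     : false | true | 'FILE' | 'DIR' | [ ...childNames ]
// this.statCache[abs] : fs.Stats | false
const cache = {
  '/proj/src': ['index.js', 'util.js'], // readdir() succeeded, so it is a directory
  '/proj/src/index.js': true,           // known to exist, exact type not needed yet
  '/proj/missing': false                // ENOENT/ENOTDIR was observed
}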
+ var exists = this._stat(prefix) -// old api surface -glob.glob = glob + if (!this.matches[index]) + this.matches[index] = Object.create(null) -function extend (origin, add) { - if (add === null || typeof add !== 'object') { - return origin - } + // If it doesn't exist, then just mark the lack of results + if (!exists) + return - var keys = Object.keys(add) - var i = keys.length - while (i--) { - origin[keys[i]] = add[keys[i]] + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } } - return origin -} - -glob.hasMagic = function (pattern, options_) { - var options = extend({}, options_) - options.noprocess = true - - var g = new Glob(pattern, options) - var set = g.minimatch.set - - if (!pattern) - return false - if (set.length > 1) - return true - - for (var j = 0; j < set[0].length; j++) { - if (typeof set[0][j] !== 'string') - return true - } + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') - return false + // Mark this as a match + this._emitMatch(index, prefix) } -glob.Glob = Glob -inherits(Glob, EE) -function Glob (pattern, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' - if (options && options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return new GlobSync(pattern, options) - } + if (f.length > this.maxLength) + return false - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb) + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] - setopts(this, pattern, options) - this._didRealPath = false + if (Array.isArray(c)) + c = 'DIR' - // process each pattern in the minimatch set - var n = this.minimatch.set.length + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c - // The matches are stored as {: true,...} so that - // duplicates are automagically pruned. - // Later, we do an Object.keys() on these. - // Keep them as a list so we can fill in when nonull is set. - this.matches = new Array(n) + if (needDir && c === 'FILE') + return false - if (typeof cb === 'function') { - cb = once(cb) - this.on('error', cb) - this.on('end', function (matches) { - cb(null, matches) - }) + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. 
} - var self = this - this._processing = 0 - - this._emitQueue = [] - this._processQueue = [] - this.paused = false - - if (this.noprocess) - return this - - if (n === 0) - return done() - - var sync = true - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false, done) - } - sync = false + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } - function done () { - --self._processing - if (self._processing <= 0) { - if (sync) { - process.nextTick(function () { - self._finish() - }) - } else { - self._finish() + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat } + } else { + stat = lstat } } -} - -Glob.prototype._finish = function () { - assert(this instanceof Glob) - if (this.aborted) - return - if (this.realpath && !this._didRealpath) - return this._realpath() + this.statCache[abs] = stat - common.finish(this) - this.emit('end', this.found) -} + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' -Glob.prototype._realpath = function () { - if (this._didRealpath) - return + this.cache[abs] = this.cache[abs] || c - this._didRealpath = true + if (needDir && c === 'FILE') + return false - var n = this.matches.length - if (n === 0) - return this._finish() + return c +} - var self = this - for (var i = 0; i < this.matches.length; i++) - this._realpathSet(i, next) +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} - function next () { - if (--n === 0) - self._finish() - } +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) } -Glob.prototype._realpathSet = function (index, cb) { - var matchset = this.matches[index] - if (!matchset) - return cb() - var found = Object.keys(matchset) - var self = this - var n = found.length +/***/ }), - if (n === 0) - return cb() +/***/ 6942: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var set = this.matches[index] = Object.create(null) - found.forEach(function (p, i) { - // If there's a problem with the stat, then it means that - // one or more of the links in the realpath couldn't be - // resolved. just return the abs value in that case. - p = self._makeAbs(p) - rp.realpath(p, self.realpathCache, function (er, real) { - if (!er) - set[real] = true - else if (er.syscall === 'stat') - set[p] = true - else - self.emit('error', er) // srsly wtf right here +module.exports = minimatch +minimatch.Minimatch = Minimatch - if (--n === 0) { - self.matches[index] = set - cb() - } - }) - }) +var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || { + sep: '/' } +minimatch.sep = path.sep -Glob.prototype._mark = function (p) { - return common.mark(this, p) -} +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __nccwpck_require__(2054) -Glob.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' 
}, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } } -Glob.prototype.abort = function () { - this.aborted = true - this.emit('abort') -} +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' -Glob.prototype.pause = function () { - if (!this.paused) { - this.paused = true - this.emit('pause') - } +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) } -Glob.prototype.resume = function () { - if (this.paused) { - this.emit('resume') - this.paused = false - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0) - this._emitQueue.length = 0 - for (var i = 0; i < eq.length; i ++) { - var e = eq[i] - this._emitMatch(e[0], e[1]) - } - } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0) - this._processQueue.length = 0 - for (var i = 0; i < pq.length; i ++) { - var p = pq[i] - this._processing-- - this._process(p[0], p[1], p[2], p[3]) - } - } +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) } } -Glob.prototype._process = function (pattern, index, inGlobStar, cb) { - assert(this instanceof Glob) - assert(typeof cb === 'function') - - if (this.aborted) - return +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} - this._processing++ - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]) - return +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch } - //console.error('PROCESS %d', this._processing, pattern) + var orig = minimatch - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) } - // now n is the index of the first one that is *not* a string. - - // see if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index, cb) - return - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } - default: - // pattern has some string bits in the front. 
- // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) } - var remain = pattern.slice(n) + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } - var abs = this._makeAbs(read) + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } - //if ignored, skip _processing - if (childrenIgnored(this, read)) - return cb() + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) + return m } -Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch } -Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return cb() +function minimatch (p, pattern, options) { + assertValidPattern(pattern) - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' + if (!options) options = {} - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false } - //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + return new Minimatch(pattern, options).match(p) +} - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return cb() +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. 
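`minimatch.defaults` above builds a copy of the matcher with some options pre-applied, merging them into every call via `ext()`. A small usage sketch, assuming the standalone `minimatch` package:

const minimatch = require('minimatch')

const mmDot = minimatch.defaults({ dot: true })
minimatch('.env', '*') //=> false  dotfiles are excluded by default
mmDot('.env', '*')     //=> true   the defaulted copy carries dot:true into every call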
+ assertValidPattern(pattern) - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) + if (!options) options = {} - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } + pattern = pattern.trim() - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return cb() + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') } - // now test all matched entries as stand-ins for that part - // of the pattern. - remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - this._process([e].concat(remain), index, inGlobStar, cb) - } - cb() + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() } -Glob.prototype._emitMatch = function (index, e) { - if (this.aborted) - return +Minimatch.prototype.debug = function () {} - if (isIgnored(this, e)) - return +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options - if (this.paused) { - this._emitQueue.push([index, e]) + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true return } - var abs = isAbsolute(e) ? e : this._makeAbs(e) + // step 1: figure out negation, etc. + this.parseNegate() - if (this.mark) - e = this._mark(e) + // step 2: expand braces + var set = this.globSet = this.braceExpand() - if (this.absolute) - e = abs + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } - if (this.matches[index][e]) - return + this.debug(this.pattern, set) - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) - this.matches[index][e] = true + this.debug(this.pattern, set) - var st = this.statCache[abs] - if (st) - this.emit('stat', e, st) + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) - this.emit('match', e) + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. 
+ set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set } -Glob.prototype._readdirInGlobStar = function (abs, cb) { - if (this.aborted) - return +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false, cb) + if (options.nonegate) return - var lstatkey = 'lstat\0' + abs - var self = this - var lstatcb = inflight(lstatkey, lstatcb_) + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } - if (lstatcb) - self.fs.lstat(abs, lstatcb) + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} - function lstatcb_ (er, lstat) { - if (er && er.code === 'ENOENT') - return cb() +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} - var isSym = lstat && lstat.isSymbolicLink() - self.symlinks[abs] = isSym +Minimatch.prototype.braceExpand = braceExpand - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) { - self.cache[abs] = 'FILE' - cb() - } else - self._readdir(abs, false, cb) +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] } + + return expand(pattern) } -Glob.prototype._readdir = function (abs, inGlobStar, cb) { - if (this.aborted) - return +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } - cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) - if (!cb) - return + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} - //console.error('RD %j %j', +inGlobStar, abs) - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb) +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
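The header comment above ("**" only has special meaning when it is the *only* thing in a path portion, and can be disabled with `noglobstar`) translates to the following behaviour through the public API; a sketch assuming the standalone `minimatch` package:

const minimatch = require('minimatch')

minimatch('a/b/c/d.js', 'a/**/d.js')                       //=> true   '**' alone in a portion spans any depth
minimatch('a/b/c/d.js', 'a/**/d.js', { noglobstar: true }) //=> false  '**' degrades to '*', one portion only
minimatch('a/b/d.js',   'a/**/d.js', { noglobstar: true }) //=> true   a single portion still matches
minimatch('a/bc/d.js',  'a/b**/d.js')                      //=> true   '**' mixed with text acts like a plain '*'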
+Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return cb() + var options = this.options - if (Array.isArray(c)) - return cb(null, c) + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' } + if (pattern === '') return '' + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' var self = this - self.fs.readdir(abs, readdirCb(this, abs, cb)) -} -function readdirCb (self, abs, cb) { - return function (er, entries) { - if (er) - self._readdirError(abs, er, cb) - else - self._readdirEntries(abs, entries, cb) + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } } -} -Glob.prototype._readdirEntries = function (abs, entries, cb) { - if (this.aborted) - return + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue } - } - this.cache[abs] = entries - return cb(null, entries) -} + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } -Glob.prototype._readdirError = function (f, er, cb) { - if (this.aborted) - return + case '\\': + clearStateChar() + escaping = true + continue - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - this.emit('error', error) - this.abort() - } - break + // the various stateChar values + // for the "extglob" stuff. 
+ case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) { - this.emit('error', er) - // If the error is handled, then we abort - // if not, we threw out of here - this.abort() - } - if (!this.silent) - console.error('glob error', er) - break - } + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue - return cb() -} + case '(': + if (inClass) { + re += '(' + continue + } -Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} + if (!stateChar) { + re += '\\(' + continue + } + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue -Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - //console.error('pgs2', prefix, remain[0], entries) + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return cb() + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false, cb) + clearStateChar() + re += '|' + continue - var isSym = this.symlinks[abs] - var len = entries.length + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return cb() + if (inClass) { + re += '\\' + c + continue + } - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' 
&& !this.dot) + inClass = true + classStart = i + reClassStart = re.length + re += c continue - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true, cb) + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } - var below = gspref.concat(entries[i], remain) - this._process(below, index, true, cb) - } + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } - cb() -} + // finish up the class. + hasMagic = true + inClass = false + re += c + continue -Glob.prototype._processSimple = function (prefix, index, cb) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var self = this - this._stat(prefix, function (er, exists) { - self._processSimple2(prefix, index, er, exists, cb) - }) -} -Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + default: + // swallow any state char that wasn't consumed + clearStateChar() - //console.error('ps2', prefix, exists) + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } - if (!this.matches[index]) - this.matches[index] = Object.create(null) + re += c - // If it doesn't exist, then just mark the lack of results - if (!exists) - return cb() + } // switch + } // for - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] } - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. 
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } - // Mark this as a match - this._emitMatch(index, prefix) - cb() -} + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) -// Returns either 'DIR', 'FILE', or false -Glob.prototype._stat = function (f, cb) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type - if (f.length > this.maxLength) - return cb() + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } - if (Array.isArray(c)) - c = 'DIR' + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return cb(null, c) + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] - if (needDir && c === 'FILE') - return cb() + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } + nlLast += nlAfter - var exists - var stat = this.statCache[abs] - if (stat !== undefined) { - if (stat === false) - return cb(null, stat) - else { - var type = stat.isDirectory() ? 'DIR' : 'FILE' - if (needDir && type === 'FILE') - return cb() - else - return cb(null, type, stat) + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') } - } - - var self = this - var statcb = inflight('stat\0' + abs, lstatcb_) - if (statcb) - self.fs.lstat(abs, statcb) + nlAfter = cleanAfter - function lstatcb_ (er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - // If it's a symlink, then treat it as the target, unless - // the target does not exist, then treat it as a file. 
- return self.fs.stat(abs, function (er, stat) { - if (er) - self._stat2(f, abs, null, lstat, cb) - else - self._stat2(f, abs, er, stat, cb) - }) - } else { - self._stat2(f, abs, er, lstat, cb) + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe } -} -Glob.prototype._stat2 = function (f, abs, er, stat, cb) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return cb() + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re } - var needDir = f.slice(-1) === '/' - this.statCache[abs] = stat - - if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) - return cb(null, false, stat) - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return cb() + if (addPatternStart) { + re = patternStart + re + } - return cb(null, c, stat) -} + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } -/***/ }), + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } -/***/ 9010: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + regExp._glob = pattern + regExp._src = re -module.exports = globSync -globSync.GlobSync = GlobSync + return regExp +} -var rp = __nccwpck_require__(6863) -var minimatch = __nccwpck_require__(3973) -var Minimatch = minimatch.Minimatch -var Glob = (__nccwpck_require__(1957).Glob) -var util = __nccwpck_require__(3837) -var path = __nccwpck_require__(1017) -var assert = __nccwpck_require__(9491) -var isAbsolute = __nccwpck_require__(8714) -var common = __nccwpck_require__(7625) -var setopts = common.setopts -var ownProp = common.ownProp -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} -function globSync (pattern, options) { - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp - return new GlobSync(pattern, options).found -} + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. 
+ var set = this.set -function GlobSync (pattern, options) { - if (!pattern) - throw new Error('must provide pattern') + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options) + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') - setopts(this, pattern, options) + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' - if (this.noprocess) - return this + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' - var n = this.minimatch.set.length - this.matches = new Array(n) - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false) + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false } - this._finish() + return this.regexp } -GlobSync.prototype._finish = function () { - assert.ok(this instanceof GlobSync) - if (this.realpath) { - var self = this - this.matches.forEach(function (matchset, index) { - var set = self.matches[index] = Object.create(null) - for (var p in matchset) { - try { - p = self._makeAbs(p) - var real = rp.realpathSync(p, self.realpathCache) - set[real] = true - } catch (er) { - if (er.syscall === 'stat') - set[self._makeAbs(p)] = true - else - throw er - } - } - }) +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) } - common.finish(this) + return list } +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true -GlobSync.prototype._process = function (pattern, index, inGlobStar) { - assert.ok(this instanceof GlobSync) + var options = this.options - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') } - // now n is the index of the first one that is *not* a string. - // See if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index) - return + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. 
+ // Either way, return on the first hit. - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break } - var remain = pattern.slice(n) + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} - var abs = this._makeAbs(read) +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options - //if ignored, skip processing - if (childrenIgnored(this, read)) - return + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar) -} + this.debug('matchOne', file.length, pattern.length) + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] -GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar) + this.debug(pattern, p, f) - // if the abs isn't a dir, then nothing can match! - if (!entries) - return + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. 
+ // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true } - if (m) - matchedEntries.push(e) - } - } - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix.slice(-1) !== '/') - e = prefix + '/' + e - else - e = prefix + e + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } } - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true } - this._emitMatch(index, e) + return false } - // This was the last one, and no stats were needed - return + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false } - // now test all matched entries as stand-ins for that part - // of the pattern. - remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) - newPattern = [prefix, e] - else - newPattern = [e] - this._process(newPattern.concat(remain), index, inGlobStar) + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. 
+ // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') } + + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') } +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} -GlobSync.prototype._emitMatch = function (index, e) { - if (isIgnored(this, e)) - return +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} - var abs = this._makeAbs(e) - if (this.mark) - e = this._mark(e) +/***/ }), - if (this.absolute) { - e = abs - } +/***/ 6994: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (this.matches[index][e]) - return +var path = __nccwpck_require__(1017); +var fs = __nccwpck_require__(7147); +var _0777 = parseInt('0777', 8); - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; - this.matches[index][e] = true +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; + + var cb = f || /* istanbul ignore next */ function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + /* istanbul ignore if */ + if (path.dirname(p) === p) return cb(er); + mkdirP(path.dirname(p), opts, function (er, made) { + /* istanbul ignore if */ + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. 
+ if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); +} + +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; + + p = path.resolve(p); + + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) /* istanbul ignore next */ { + throw err0; + } + /* istanbul ignore if */ + if (!stat.isDirectory()) throw err0; + break; + } + } - if (this.stat) - this._stat(e) -} + return made; +}; -GlobSync.prototype._readdirInGlobStar = function (abs) { - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false) +/***/ }), - var entries - var lstat - var stat - try { - lstat = this.fs.lstatSync(abs) - } catch (er) { - if (er.code === 'ENOENT') { - // lstat failed, doesn't exist - return null - } - } +/***/ 5060: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var isSym = lstat && lstat.isSymbolicLink() - this.symlinks[abs] = isSym +module.exports = rimraf +rimraf.sync = rimrafSync - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = 'FILE' - else - entries = this._readdir(abs, false) +var assert = __nccwpck_require__(9491) +var path = __nccwpck_require__(1017) +var fs = __nccwpck_require__(7147) +var glob = undefined +try { + glob = __nccwpck_require__(4966) +} catch (_err) { + // treat glob as optional. 
+} +var _0666 = parseInt('666', 8) - return entries +var defaultGlobOpts = { + nosort: true, + silent: true } -GlobSync.prototype._readdir = function (abs, inGlobStar) { - var entries +// for EMFILE handling +var timeout = 0 - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs) +var isWindows = (process.platform === "win32") - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return null +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) - if (Array.isArray(c)) - return c + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true } - - try { - return this._readdirEntries(abs, this.fs.readdirSync(abs)) - } catch (er) { - this._readdirError(abs, er) - return null + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts } -GlobSync.prototype._readdirEntries = function (abs, entries) { - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} } - this.cache[abs] = entries + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') - // mark and cache dir-ness - return entries -} + defaults(options) -GlobSync.prototype._readdirError = function (f, er) { - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - throw error - } - break + var busyTries = 0 + var errState = null + var n = 0 - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) - default: // some unusual error. Treat as failure. 
- this.cache[this._makeAbs(f)] = false - if (this.strict) - throw er - if (!this.silent) - console.error('glob error', er) - break + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) } -} -GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + function afterGlob (er, results) { + if (er) + return cb(er) - var entries = this._readdir(abs, inGlobStar) + n = results.length + if (n === 0) + return cb() - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false) + // already gone + if (er.code === "ENOENT") er = null + } - var len = entries.length - var isSym = this.symlinks[abs] + timeout = 0 + next(er) + }) + }) + } +} - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true) + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) - var below = gspref.concat(entries[i], remain) - this._process(below, index, true) - } + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? 
fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) } -GlobSync.prototype._processSimple = function (prefix, index) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var exists = this._stat(prefix) +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) - if (!this.matches[index]) - this.matches[index] = Object.create(null) + options.chmod(p, _0666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} - // If it doesn't exist, then just mark the lack of results - if (!exists) - return +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } + try { + options.chmodSync(p, _0666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er } - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } - // Mark this as a match - this._emitMatch(index, prefix) + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) } -// Returns either 'DIR', 'FILE', or false -GlobSync.prototype._stat = function (f) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') - if (f.length > this.maxLength) - return false + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] +function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') - if (Array.isArray(c)) - c = 'DIR' + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return c +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. 
+function rimrafSync (p, options) { + options = options || {} + defaults(options) - if (needDir && c === 'FILE') - return false + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } + var results - var exists - var stat = this.statCache[abs] - if (!stat) { - var lstat + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { try { - lstat = this.fs.lstatSync(abs) + options.lstatSync(p) + results = [p] } catch (er) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return false - } - } - - if (lstat && lstat.isSymbolicLink()) { - try { - stat = this.fs.statSync(abs) - } catch (er) { - stat = lstat - } - } else { - stat = lstat + results = glob.sync(p, options.glob) } } - this.statCache[abs] = stat + if (!results.length) + return - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' + for (var i = 0; i < results.length; i++) { + var p = results[i] - this.cache[abs] = this.cache[abs] || c + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return - if (needDir && c === 'FILE') - return false + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } - return c + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } } -GlobSync.prototype._mark = function (p) { - return common.mark(this, p) +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } } -GlobSync.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + var retries = isWindows ? 
100 : 1 + var i = 0 + do { + var threw = true + try { + var ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) } /***/ }), -/***/ 7356: +/***/ 5763: /***/ ((module) => { @@ -24746,13 +21083,13 @@ function clone (obj) { /***/ }), -/***/ 7758: +/***/ 3273: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var fs = __nccwpck_require__(7147) -var polyfills = __nccwpck_require__(263) -var legacy = __nccwpck_require__(3086) -var clone = __nccwpck_require__(7356) +var polyfills = __nccwpck_require__(4170) +var legacy = __nccwpck_require__(4071) +var clone = __nccwpck_require__(5763) var util = __nccwpck_require__(3837) @@ -25201,7 +21538,7 @@ function retry () { /***/ }), -/***/ 3086: +/***/ 4071: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Stream = (__nccwpck_require__(2781).Stream) @@ -25326,7 +21663,7 @@ function legacy (fs) { /***/ }), -/***/ 263: +/***/ 4170: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var constants = __nccwpck_require__(2057) @@ -25688,12 +22025,12 @@ function patch (fs) { /***/ }), -/***/ 2492: +/***/ 2681: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var wrappy = __nccwpck_require__(2940) +var wrappy = __nccwpck_require__(766) var reqs = Object.create(null) -var once = __nccwpck_require__(1223) +var once = __nccwpck_require__(4822) module.exports = wrappy(inflight) @@ -25749,7 +22086,7 @@ function slice (args) { /***/ }), -/***/ 4124: +/***/ 445: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { try { @@ -25759,13 +22096,13 @@ try { module.exports = util.inherits; } catch (e) { /* istanbul ignore next */ - module.exports = __nccwpck_require__(8544); + module.exports = __nccwpck_require__(9019); } /***/ }), -/***/ 8544: +/***/ 9019: /***/ ((module) => { if (typeof Object.create === 'function') { @@ -25799,52 +22136,7 @@ if (typeof Object.create === 'function') { /***/ }), -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { - - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
- */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - -/***/ }), - -/***/ 893: +/***/ 6367: /***/ ((module) => { var toString = {}.toString; @@ -25856,371 +22148,7 @@ module.exports = Array.isArray || function (arr) { /***/ }), -/***/ 1360: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -var listenerCount = (__nccwpck_require__(2361).listenerCount) -// listenerCount isn't in node 0.10, so here's a basic polyfill -listenerCount = listenerCount || function (ee, event) { - var listeners = ee && ee._events && ee._events[event] - if (Array.isArray(listeners)) { - return listeners.length - } else if (typeof listeners === 'function') { - return 1 - } else { - return 0 - } -} - -module.exports = listenerCount - - -/***/ }), - -/***/ 7129: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - - -// A linked list to keep track of recently-used-ness -const Yallist = __nccwpck_require__(665) - -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') - -const naiveLength = () => 1 - -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. -// -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. -class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } - - if (!options) - options = {} - - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() - } - - // resize the cache when the max changes. 
- set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') - - this[MAX] = mL || Infinity - trim(this) - } - get max () { - return this[MAX] - } - - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale - } - get allowStale () { - return this[ALLOW_STALE] - } - - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') - - this[MAX_AGE] = mA - trim(this) - } - get maxAge () { - return this[MAX_AGE] - } - - // resize the cache when the lengthCalculator changes. - set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength - - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) - } - trim(this) - } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } - - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev - } - } - - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next - } - } - - keys () { - return this[LRU_LIST].toArray().map(k => k.key) - } - - values () { - return this[LRU_LIST].toArray().map(k => k.value) - } - - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) - } - - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list - } - - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) - } - - dumpLru () { - return this[LRU_LIST] - } - - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] - - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') - - const now = maxAge ? Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) - - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } - - const node = this[CACHE].get(key) - const item = node.value - - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) - } - - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true - } - - const hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. 
- if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) - - return false - } - - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true - } - - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) - } - - get (key) { - return get(this, key, true) - } - - peek (key) { - return get(this, key, false) - } - - pop () { - const node = this[LRU_LIST].tail - if (!node) - return null - - del(this, node) - return node.value - } - - del (key) { - del(this, this[CACHE].get(key)) - } - - load (arr) { - // reset the cache - this.reset() - - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) - } - } - } - } - - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) - } -} - -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) - } - } - return hit.value - } -} - -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false - - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} - -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. - const prev = walker.prev - del(self, walker) - walker = prev - } - } -} - -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) - - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) - } -} - -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 - } -} - -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined - } - if (hit) - fn.call(thisp, hit.value, hit.key, self) -} - -module.exports = LRUCache - - -/***/ }), - -/***/ 7426: +/***/ 1962: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*! @@ -26239,7 +22167,7 @@ module.exports = __nccwpck_require__(3765) /***/ }), -/***/ 3583: +/***/ 9324: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { /*! 
@@ -26256,7 +22184,7 @@ module.exports = __nccwpck_require__(3765) * @private */ -var db = __nccwpck_require__(7426) +var db = __nccwpck_require__(1962) var extname = (__nccwpck_require__(1017).extname) /** @@ -26434,42109 +22362,43839 @@ function populateMaps (extensions, types) { /***/ }), -/***/ 3973: +/***/ 4822: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = minimatch -minimatch.Minimatch = Minimatch +var wrappy = __nccwpck_require__(766) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) -var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || { - sep: '/' -} -minimatch.sep = path.sep +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __nccwpck_require__(3717) + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f } -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} -// * => any number of characters -var star = qmark + '*?' -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' +/***/ }), -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' +/***/ 5165: +/***/ ((module) => { -// characters that need to be escaped in RegExp. -var reSpecials = charSet('().*{}+?[]^$\\!') -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) + +function posix(path) { + return path.charAt(0) === '/'; } -// normalizes slashes. -var slashSplit = /\/+/ +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; + + +/***/ }), + +/***/ 5371: +/***/ ((module) => { + + + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); } } -function ext (a, b) { - b = b || {} - var t = {} - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - return t -} -minimatch.defaults = function (def) { - if (!def || typeof def !== 'object' || !Object.keys(def).length) { - return minimatch - } - var orig = minimatch +/***/ }), - var m = function minimatch (p, pattern, options) { - return orig(p, pattern, ext(def, options)) - } +/***/ 8903: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) - } - m.Minimatch.defaults = function defaults (options) { - return orig.defaults(ext(def, options)).Minimatch - } - m.filter = function filter (pattern, options) { - return orig.filter(pattern, ext(def, options)) - } - m.defaults = function defaults (options) { - return orig.defaults(ext(def, options)) +var parseUrl = (__nccwpck_require__(7310).parse); + +var DEFAULT_PORTS = { + ftp: 21, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443, +}; + +var stringEndsWith = String.prototype.endsWith || function(s) { + return s.length <= this.length && + this.indexOf(s, this.length - s.length) !== -1; +}; + +/** + * @param {string|object} url - The URL, or the result from url.parse. + * @return {string} The URL of the proxy that should handle the request to the + * given URL. If no proxy is set, this will be an empty string. + */ +function getProxyForUrl(url) { + var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; + var proto = parsedUrl.protocol; + var hostname = parsedUrl.host; + var port = parsedUrl.port; + if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { + return ''; // Don't proxy URLs without a valid scheme or host. } - m.makeRe = function makeRe (pattern, options) { - return orig.makeRe(pattern, ext(def, options)) + proto = proto.split(':', 1)[0]; + // Stripping ports in this way instead of using parsedUrl.hostname to make + // sure that the brackets around IPv6 addresses are kept. + hostname = hostname.replace(/:\d*$/, ''); + port = parseInt(port) || DEFAULT_PORTS[proto] || 0; + if (!shouldProxy(hostname, port)) { + return ''; // Don't proxy URLs that match NO_PROXY. 
} - m.braceExpand = function braceExpand (pattern, options) { - return orig.braceExpand(pattern, ext(def, options)) + var proxy = + getEnv('npm_config_' + proto + '_proxy') || + getEnv(proto + '_proxy') || + getEnv('npm_config_proxy') || + getEnv('all_proxy'); + if (proxy && proxy.indexOf('://') === -1) { + // Missing scheme in proxy, default to the requested URL's scheme. + proxy = proto + '://' + proxy; } + return proxy; +} - m.match = function (list, pattern, options) { - return orig.match(list, pattern, ext(def, options)) +/** + * Determines whether a given URL should be proxied. + * + * @param {string} hostname - The host name of the URL. + * @param {number} port - The effective port of the URL. + * @returns {boolean} Whether the given URL should be proxied. + * @private + */ +function shouldProxy(hostname, port) { + var NO_PROXY = + (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); + if (!NO_PROXY) { + return true; // Always proxy if NO_PROXY is not set. + } + if (NO_PROXY === '*') { + return false; // Never proxy if wildcard is set. } - return m + return NO_PROXY.split(/[,\s]/).every(function(proxy) { + if (!proxy) { + return true; // Skip zero-length hosts. + } + var parsedProxy = proxy.match(/^(.+):(\d+)$/); + var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; + var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; + if (parsedProxyPort && parsedProxyPort !== port) { + return true; // Skip if ports don't match. + } + + if (!/^[.*]/.test(parsedProxyHostname)) { + // No wildcards, so stop proxying if there is an exact match. + return hostname !== parsedProxyHostname; + } + + if (parsedProxyHostname.charAt(0) === '*') { + // Remove leading wildcard. + parsedProxyHostname = parsedProxyHostname.slice(1); + } + // Stop proxying if the hostname ends with the no_proxy host. + return !stringEndsWith.call(hostname, parsedProxyHostname); + }); } -Minimatch.defaults = function (def) { - return minimatch.defaults(def).Minimatch +/** + * Get the value for an environment variable. + * + * @param {string} key - The name of the environment variable. + * @return {string} The value of the environment variable. + * @private + */ +function getEnv(key) { + return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; } -function minimatch (p, pattern, options) { - assertValidPattern(pattern) +exports.j = getProxyForUrl; - if (!options) options = {} - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } +/***/ }), - return new Minimatch(pattern, options).match(p) -} +/***/ 540: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) - } +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - assertValidPattern(pattern) +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. - if (!options) options = {} - pattern = pattern.trim() - // windows support: need to use /, not \ - if (!options.allowWindowsEscape && path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } +/**/ - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - this.partial = !!options.partial +var pna = __nccwpck_require__(5371); +/**/ - // make the set of regexps etc. - this.make() -} +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ -Minimatch.prototype.debug = function () {} +module.exports = Duplex; -Minimatch.prototype.make = make -function make () { - var pattern = this.pattern - var options = this.options +/**/ +var util = Object.create(__nccwpck_require__(1307)); +util.inherits = __nccwpck_require__(445); +/**/ - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } +var Readable = __nccwpck_require__(9631); +var Writable = __nccwpck_require__(4840); - // step 1: figure out negation, etc. - this.parseNegate() +util.inherits(Duplex, Readable); - // step 2: expand braces - var set = this.globSet = this.braceExpand() +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} - if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); - this.debug(this.pattern, set) + Readable.call(this, options); + Writable.call(this, options); - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) + if (options && options.readable === false) this.readable = false; - this.debug(this.pattern, set) + if (options && options.writable === false) this.writable = false; - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - this.debug(this.pattern, set) + this.once('end', onend); +} - // filter out everything that didn't compile properly. 
- set = set.filter(function (s) { - return s.indexOf(false) === -1 - }) +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); - this.debug(this.pattern, set) +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; - this.set = set + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); } -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 +function onEndNT(self) { + self.end(); +} - if (options.nonegate) return +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' - ; i++) { - negate = !negate - negateOffset++ + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; } +}); - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate -} +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg + pna.nextTick(cb, err); +}; + +/***/ }), + +/***/ 1279: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Copyright Joyent, Inc. and other Node contributors. // -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return braceExpand(pattern, options) -} +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
-Minimatch.prototype.braceExpand = braceExpand +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. -function braceExpand (pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options - } else { - options = {} - } - } - pattern = typeof pattern === 'undefined' - ? this.pattern : pattern - assertValidPattern(pattern) +module.exports = PassThrough; - // Thanks to Yeting Li for - // improving this regexp to avoid a ReDOS vulnerability. - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - // shortcut. no need to expand. - return [pattern] - } +var Transform = __nccwpck_require__(9873); - return expand(pattern) -} +/**/ +var util = Object.create(__nccwpck_require__(1307)); +util.inherits = __nccwpck_require__(445); +/**/ -var MAX_PATTERN_LENGTH = 1024 * 64 -var assertValidPattern = function (pattern) { - if (typeof pattern !== 'string') { - throw new TypeError('invalid pattern') - } +util.inherits(PassThrough, Transform); - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError('pattern is too long') - } +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); } -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 9631: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Copyright Joyent, Inc. and other Node contributors. // -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - assertValidPattern(pattern) +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- var options = this.options - // shortcuts - if (pattern === '**') { - if (!options.noglobstar) - return GLOBSTAR - else - pattern = '*' - } - if (pattern === '') return '' - var re = '' - var hasMagic = !!options.nocase - var escaping = false - // ? => one single character - var patternListStack = [] - var negativeLists = [] - var stateChar - var inClass = false - var reClassStart = -1 - var classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - var patternStart = pattern.charAt(0) === '.' ? '' // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' - : '(?!\\.)' - var self = this +/**/ - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - self.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } +var pna = __nccwpck_require__(5371); +/**/ - for (var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) +module.exports = Readable; - // skip over any that are escaped. - if (escaping && reSpecials[c]) { - re += '\\' + c - escaping = false - continue - } +/**/ +var isArray = __nccwpck_require__(6367); +/**/ - switch (c) { - /* istanbul ignore next */ - case '/': { - // completely not allowed, even escaped. - // Should already be path-split by now. - return false - } +/**/ +var Duplex; +/**/ - case '\\': - clearStateChar() - escaping = true - continue +Readable.ReadableState = ReadableState; - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) +/**/ +var EE = (__nccwpck_require__(2361).EventEmitter); - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - self.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue +/**/ +var Stream = __nccwpck_require__(6009); +/**/ + +/**/ + +var Buffer = (__nccwpck_require__(8746).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(__nccwpck_require__(1307)); +util.inherits = __nccwpck_require__(445); +/**/ + +/**/ +var debugUtil = __nccwpck_require__(3837); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = __nccwpck_require__(5049); +var destroyImpl = __nccwpck_require__(3701); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; - case '(': - if (inClass) { - re += '(' - continue - } +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); - if (!stateChar) { - re += '\\(' - continue - } + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }) - // negation is (?:(?!js)[^/]*) - re += stateChar === '!' ? '(?:(?!(?:' : '(?:' - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue +function ReadableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(540); - case ')': - if (inClass || !patternListStack.length) { - re += '\\)' - continue - } + options = options || {}; - clearStateChar() - hasMagic = true - var pl = patternListStack.pop() - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(pl) - } - pl.reEnd = re.length - continue + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; - case '|': - if (inClass || !patternListStack.length || escaping) { - re += '\\|' - escaping = false - continue - } + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; - clearStateChar() - re += '|' - continue + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 
16 : 16 * 1024; - if (inClass) { - re += '\\' + c - continue - } + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; - inClass = true - classStart = i - reClassStart = re.length - re += c - continue + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - escaping = false - continue - } + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; - // handle the case where we left a class open. - // "[z-a]" is valid, equivalent to "\[z-a\]" - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - var cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + cs + ']') - } catch (er) { - // not a valid class! - var sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' - hasMagic = hasMagic || sp[1] - inClass = false - continue - } + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; - // finish up the class. - hasMagic = true - inClass = false - re += c - continue + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; - default: - // swallow any state char that wasn't consumed - clearStateChar() + // has it been destroyed + this.destroyed = false; - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === '^' && inClass)) { - re += '\\' - } + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - re += c + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; - } // switch - } // for + // if true, a maybeReadMore has been scheduled + this.readingMore = false; - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. 
We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.substr(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(6461)/* .StringDecoder */ .s); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; } +} - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(540); - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) + if (!(this instanceof Readable)) return new Readable(options); - this.debug('tail=%j\n %s', tail, tail, pl, re) - var t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type + this._readableState = new ReadableState(options, this); - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } + // legacy + this.readable = true; - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } + if (options) { + if (typeof options.read === 'function') this._read = options.read; - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - var addPatternStart = false - switch (re.charAt(0)) { - case '[': case '.': case '(': addPatternStart = true + if (typeof options.destroy === 'function') this._destroy = options.destroy; } - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. 
- for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n] + Stream.call(this); +} - var nlBefore = re.slice(0, nl.reStart) - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) - var nlAfter = re.slice(nl.reEnd) +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } - nlLast += nlAfter + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. - var openParensBefore = nlBefore.split('(').length - 1 - var cleanAfter = nlAfter - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; - var dollar = '' - if (nlAfter === '' && isSub !== SUBPARSE) { - dollar = '$' +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast - re = newRe + } else { + skipChunkCheck = true; } - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; - if (addPatternStart) { - re = patternStart + re - } +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; - // parsing just a piece of a larger pattern. 
- if (isSub === SUBPARSE) { - return [re, hasMagic] +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } } - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); } + maybeReadMore(stream, state); +} - var flags = options.nocase ? 'i' : '' - try { - var regExp = new RegExp('^' + re + '$', flags) - } catch (er) /* istanbul ignore next - should be impossible */ { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); } + return er; +} - regExp._glob = pattern - regExp._src = re +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} - return regExp +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(6461)/* .StringDecoder */ .s); + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; } -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; } -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set + if (n !== 0) state.emittedReadable = false; - if (!set.length) { - this.regexp = false - return this.regexp + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; } - var options = this.options - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - var flags = options.nocase ? 'i' : '' + n = howMuchToRead(n, state); - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === 'string') ? regExpEscape(p) - : p._src - }).join('\\\/') - }).join('|') + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. 
+ // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); - try { - this.regexp = new RegExp(re, flags) - } catch (ex) /* istanbul ignore next - should be impossible */ { - this.regexp = false + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); } - return this.regexp -} -minimatch.match = function (list, pattern, options) { - options = options || {} - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (mm.options.nonull && !list.length) { - list.push(pattern) + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); } - return list -} -Minimatch.prototype.match = function match (f, partial) { - if (typeof partial === 'undefined') partial = this.partial - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; - if (f === '/' && partial) return true + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } - var options = this.options + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); } - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) - - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. 
+ if (ret !== null) this.emit('data', ret); - var set = this.set - this.debug(this.pattern, 'set', set) + return ret; +}; - // Find the basename of the path by looking for the last non-empty segment - var filename - var i - for (i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } } + state.ended = true; - for (i = 0; i < set.length; i++) { - var pattern = set[i] - var file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - var hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); } +} - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); } -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. -Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} - this.debug('matchOne', file.length, pattern.length) +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
+Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; - this.debug(pattern, p, f) + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - // should be impossible. - // some invalid regexp stuff in the set. - /* istanbul ignore if */ - if (p === false) return false + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' || - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); } + } + } - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] + function onend() { + debug('onend'); + dest.end(); + } - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' 
|| - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } + cleanedUp = true; - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - /* istanbul ignore if */ - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + increasedAwaitDrain = true; } - return false + src.pause(); } + } - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - hit = f === p - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } - if (!hit) return false + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); } + dest.once('finish', onfinish); - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. 
- // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else /* istanbul ignore else */ if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - return (fi === fl - 1) && (file[fi] === '') + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); } - // should be unreachable. - /* istanbul ignore next */ - throw new Error('wtf?') -} + return dest; +}; -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, '$1') +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; } -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') -} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; -/***/ }), + if (!dest) dest = state.pipes; -/***/ 1077: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + // slow case. multiple pipe destinations. -const proc = - typeof process === 'object' && process - ? 
process - : { - stdout: null, - stderr: null, - } -const EE = __nccwpck_require__(2361) -const Stream = __nccwpck_require__(2781) -const stringdecoder = __nccwpck_require__(1576) -const SD = stringdecoder.StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFER = Symbol('buffer') -const PIPES = Symbol('pipes') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed') -// internal event when stream has an error -const ERROR = Symbol('error') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') -const ABORT = Symbol('abort') -const ABORTED = Symbol('aborted') -const SIGNAL = Symbol('signal') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = - (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') -const ITERATOR = - (doIter && Symbol.iterator) || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' - -const isArrayBuffer = b => - b instanceof ArrayBuffer || - (typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0) - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; -class Pipe { - constructor(src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe() { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors() {} - end() { - this.unpipe() - if (this.opts.end) this.dest.end() + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { hasUnpiped: false }); + }return this; } -} -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor(src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} + // try to find the right one. 
+ var index = indexOf(state.pipes, dest); + if (index === -1) return this; -class Minipass extends Stream { - constructor(options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this[PIPES] = [] - this[BUFFER] = [] - this[OBJECTMODE] = (options && options.objectMode) || false - if (this[OBJECTMODE]) this[ENCODING] = null - else this[ENCODING] = (options && options.encoding) || null - if (this[ENCODING] === 'buffer') this[ENCODING] = null - this[ASYNC] = (options && !!options.async) || false - this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) - } - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) - } - this[SIGNAL] = options && options.signal - this[ABORTED] = false - if (this[SIGNAL]) { - this[SIGNAL].addEventListener('abort', () => this[ABORT]()) - if (this[SIGNAL].aborted) { - this[ABORT]() - } - } - } - - get bufferLength() { - return this[BUFFERLENGTH] - } - - get encoding() { - return this[ENCODING] - } - set encoding(enc) { - if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; - if ( - this[ENCODING] && - enc !== this[ENCODING] && - ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) - ) - throw new Error('cannot change encoding') + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this[BUFFER].length) - this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } } - - this[ENCODING] = enc } - setEncoding(enc) { - this.encoding = enc - } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; - get objectMode() { - return this[OBJECTMODE] - } - set objectMode(om) { - this[OBJECTMODE] = this[OBJECTMODE] || !!om - } +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} - get ['async']() { - return this[ASYNC] - } - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); } + return this; +}; - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true - this.emit('abort', this[SIGNAL].reason) - this.destroy(this[SIGNAL].reason) +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); } +} - get aborted() { - return this[ABORTED] +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); } - set aborted(_) {} - write(chunk, encoding, cb) { - if (this[ABORTED]) return false - if (this[EOF]) throw new Error('write after end') + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} - if (this[DESTROYED]) { - this.emit( - 'error', - Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - ) - ) - return true - } +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} - if (!encoding) encoding = 'utf8' +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; - const fn = this[ASYNC] ? defer : f => f() + var state = this._readableState; + var paused = false; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) + _this.push(null); + }); - if (this[BUFFERLENGTH] !== 0) this.emit('readable') + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); - if (cb) fn(cb) + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - return this.flowing + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); } + }); - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - if (cb) fn(cb) - return this.flowing + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); } + } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if ( - typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed) - ) { - chunk = Buffer.from(chunk, encoding) - } + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + return this; +}; - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); - if (this[BUFFERLENGTH] !== 0) this.emit('readable') +// exposed for testing purposes only. +Readable._fromList = fromList; - if (cb) fn(cb) +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; - return this.flowing + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); } - read(n) { - if (this[DESTROYED]) return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } + return ret; +} - if (this[OBJECTMODE]) n = null +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] - else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; } - - const ret = this[READ](n || null, this[BUFFER][0]) - this[MAYBE_EMIT_END]() - return ret + ++c; } + list.length -= c; + return ret; +} - [READ](n, chunk) { - if (n === chunk.length || n === null) this[BUFFERSHIFT]() - else { - this[BUFFER][0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; } + ++c; + } + list.length -= c; + return ret; +} - this.emit('data', chunk) +function endReadable(stream) { + var state = stream._readableState; - if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. 
+ if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); - return chunk + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); } +} - end(chunk, encoding, cb) { - if (typeof chunk === 'function') (cb = chunk), (chunk = null) - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - if (chunk) this.write(chunk, encoding) - if (cb) this.once('end', cb) - this[EOF] = true - this.writable = false +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. - if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() - return this +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; } + return -1; +} - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) return +/***/ }), - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this[BUFFER].length) this[FLUSH]() - else if (this[EOF]) this[MAYBE_EMIT_END]() - else this.emit('drain') - } +/***/ 9873: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - resume() { - return this[RESUME]() - } +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - pause() { - this[FLOWING] = false - this[PAUSED] = true - } +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. 
When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. - get destroyed() { - return this[DESTROYED] - } - get flowing() { - return this[FLOWING] - } - get paused() { - return this[PAUSED] - } +module.exports = Transform; - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 - else this[BUFFERLENGTH] += chunk.length - this[BUFFER].push(chunk) - } +var Duplex = __nccwpck_require__(540); - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - return this[BUFFER].shift() - } +/**/ +var util = Object.create(__nccwpck_require__(1307)); +util.inherits = __nccwpck_require__(445); +/**/ - [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) +util.inherits(Transform, Duplex); - if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') - } +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; - [FLUSHCHUNK](chunk) { - this.emit('data', chunk) - return this.flowing + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); } - pipe(dest, opts) { - if (this[DESTROYED]) return + ts.writechunk = null; + ts.writecb = null; - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) opts.end = false - else opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); - // piping an ended stream ends immediately - if (ended) { - if (opts.end) dest.end() - } else { - this[PIPES].push( - !opts.proxyErrors - ? 
new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts) - ) - if (this[ASYNC]) defer(() => this[RESUME]()) - else this[RESUME]() - } + cb(er); - return dest + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); } +} - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest) - if (p) { - this[PIPES].splice(this[PIPES].indexOf(p), 1) - p.unpipe() - } - } +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); - addListener(ev, fn) { - return this.on(ev, fn) - } + Duplex.call(this, options); - on(ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) - else fn.call(this, this[EMITTED_ERROR]) - } - return ret - } + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; - get emittedEnd() { - return this[EMITTED_END] - } + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; - [MAYBE_EMIT_END]() { - if ( - !this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF] - ) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) this.emit('close') - this[EMITTING_END] = false - } - } + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; - emit(ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - super.emit(ERROR, data) - const ret = - !this[SIGNAL] || this.listeners('error').length - ? 
super.emit('error', data) - : false - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret + if (typeof options.flush === 'function') this._flush = options.flush; } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} - [EMITEND]() { - if (this[EMITTED_END]) return +function prefinish() { + var _this = this; - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) defer(() => this[EMITEND2]()) - else this[EMITEND2]() + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); } +} - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data) - } - super.emit('data', data) - } - } +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; - for (const p of this[PIPES]) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; - // const all = await stream.collect() - collect() { - const buf = [] - if (!this[OBJECTMODE]) buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) buf.dataLength += c.length - }) - return p.then(() => buf) +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); } +}; - // const data = await stream.concat() - concat() { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength) - ) - } +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. 
+Transform.prototype._read = function (n) { + var ts = this._transformState; - // stream.promise().then(() => done, er => emitted error) - promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; } +}; - // for await (let chunk of stream) - [ASYNCITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - stopped = true - return Promise.resolve({ done: true }) - } - const next = () => { - if (stopped) return stop() - const res = this.read() - if (res !== null) return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) return stop() - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - stop() - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - this.removeListener(DESTROYED, ondestroy) - stop() - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; - return { - next, - throw: stop, - return: stop, - [ASYNCITERATOR]() { - return this - }, - } - } + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; - // for (let chunk of stream) - [ITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - this.removeListener(ERROR, stop) - this.removeListener(DESTROYED, stop) - this.removeListener('end', stop) - stopped = true - return { done: true } - } +function done(stream, er, data) { + if (er) return stream.emit('error', er); - const next = () => { - if (stopped) return stop() - const value = this.read() - return value === null ? stop() : { value } - } - this.once('end', stop) - this.once(ERROR, stop) - this.once(DESTROYED, stop) + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); - return { - next, - throw: stop, - return: stop, - [ITERATOR]() { - return this - }, - } - } + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); - destroy(er) { - if (this[DESTROYED]) { - if (er) this.emit('error', er) - else this.emit(DESTROYED) - return this - } + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} + +/***/ }), + +/***/ 4840: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this[DESTROYED] = true +// Copyright Joyent, Inc. 
and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0 - this[BUFFERLENGTH] = 0 +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. - if (typeof this.close === 'function' && !this[CLOSED]) this.close() - if (er) this.emit('error', er) - // if no error to emit, still reject pending promises - else this.emit(DESTROYED) - return this - } +/**/ - static isStream(s) { - return ( - !!s && - (s instanceof Minipass || - s instanceof Stream || - (s instanceof EE && - // readable - (typeof s.pipe === 'function' || - // writable - (typeof s.write === 'function' && typeof s.end === 'function')))) - ) - } -} +var pna = __nccwpck_require__(5371); +/**/ -exports.Minipass = Minipass +module.exports = Writable; +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} -/***/ }), +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; -/***/ 6769: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ -// Update with any zlib constants that are added or changed in the future. -// Node v6 didn't export this, so we just hard code the version and rely -// on all the other hard-coded values from zlib v4736. When node v6 -// support drops, we can just export the realZlibConstants object. 
-const realZlibConstants = (__nccwpck_require__(9796).constants) || - /* istanbul ignore next */ { ZLIB_VERNUM: 4736 } - -module.exports = Object.freeze(Object.assign(Object.create(null), { - Z_NO_FLUSH: 0, - Z_PARTIAL_FLUSH: 1, - Z_SYNC_FLUSH: 2, - Z_FULL_FLUSH: 3, - Z_FINISH: 4, - Z_BLOCK: 5, - Z_OK: 0, - Z_STREAM_END: 1, - Z_NEED_DICT: 2, - Z_ERRNO: -1, - Z_STREAM_ERROR: -2, - Z_DATA_ERROR: -3, - Z_MEM_ERROR: -4, - Z_BUF_ERROR: -5, - Z_VERSION_ERROR: -6, - Z_NO_COMPRESSION: 0, - Z_BEST_SPEED: 1, - Z_BEST_COMPRESSION: 9, - Z_DEFAULT_COMPRESSION: -1, - Z_FILTERED: 1, - Z_HUFFMAN_ONLY: 2, - Z_RLE: 3, - Z_FIXED: 4, - Z_DEFAULT_STRATEGY: 0, - DEFLATE: 1, - INFLATE: 2, - GZIP: 3, - GUNZIP: 4, - DEFLATERAW: 5, - INFLATERAW: 6, - UNZIP: 7, - BROTLI_DECODE: 8, - BROTLI_ENCODE: 9, - Z_MIN_WINDOWBITS: 8, - Z_MAX_WINDOWBITS: 15, - Z_DEFAULT_WINDOWBITS: 15, - Z_MIN_CHUNK: 64, - Z_MAX_CHUNK: Infinity, - Z_DEFAULT_CHUNK: 16384, - Z_MIN_MEMLEVEL: 1, - Z_MAX_MEMLEVEL: 9, - Z_DEFAULT_MEMLEVEL: 8, - Z_MIN_LEVEL: -1, - Z_MAX_LEVEL: 9, - Z_DEFAULT_LEVEL: -1, - BROTLI_OPERATION_PROCESS: 0, - BROTLI_OPERATION_FLUSH: 1, - BROTLI_OPERATION_FINISH: 2, - BROTLI_OPERATION_EMIT_METADATA: 3, - BROTLI_MODE_GENERIC: 0, - BROTLI_MODE_TEXT: 1, - BROTLI_MODE_FONT: 2, - BROTLI_DEFAULT_MODE: 0, - BROTLI_MIN_QUALITY: 0, - BROTLI_MAX_QUALITY: 11, - BROTLI_DEFAULT_QUALITY: 11, - BROTLI_MIN_WINDOW_BITS: 10, - BROTLI_MAX_WINDOW_BITS: 24, - BROTLI_LARGE_MAX_WINDOW_BITS: 30, - BROTLI_DEFAULT_WINDOW: 22, - BROTLI_MIN_INPUT_BLOCK_BITS: 16, - BROTLI_MAX_INPUT_BLOCK_BITS: 24, - BROTLI_PARAM_MODE: 0, - BROTLI_PARAM_QUALITY: 1, - BROTLI_PARAM_LGWIN: 2, - BROTLI_PARAM_LGBLOCK: 3, - BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, - BROTLI_PARAM_SIZE_HINT: 5, - BROTLI_PARAM_LARGE_WINDOW: 6, - BROTLI_PARAM_NPOSTFIX: 7, - BROTLI_PARAM_NDIRECT: 8, - BROTLI_DECODER_RESULT_ERROR: 0, - BROTLI_DECODER_RESULT_SUCCESS: 1, - BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, - BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, - BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, - BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, - BROTLI_DECODER_NO_ERROR: 0, - BROTLI_DECODER_SUCCESS: 1, - BROTLI_DECODER_NEEDS_MORE_INPUT: 2, - BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, - BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, - BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, - BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, - BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, - BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, - BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, - BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, - BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, - BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, - BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, - BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, - BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, - BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, - BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, - BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, - BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, - BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, - BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, - BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, - BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, - BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, - BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, - BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, - BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, - BROTLI_DECODER_ERROR_UNREACHABLE: -31, -}, realZlibConstants)) +/**/ +var asyncWrite = !process.browser && ['v0.10', 
'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ +/**/ +var Duplex; +/**/ -/***/ }), +Writable.WritableState = WritableState; -/***/ 3486: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/**/ +var util = Object.create(__nccwpck_require__(1307)); +util.inherits = __nccwpck_require__(445); +/**/ +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(9066) +}; +/**/ +/**/ +var Stream = __nccwpck_require__(6009); +/**/ -const assert = __nccwpck_require__(9491) -const Buffer = (__nccwpck_require__(4300).Buffer) -const realZlib = __nccwpck_require__(9796) +/**/ -const constants = exports.constants = __nccwpck_require__(6769) -const Minipass = __nccwpck_require__(7557) +var Buffer = (__nccwpck_require__(8746).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} -const OriginalBufferConcat = Buffer.concat +/**/ -const _superWrite = Symbol('_superWrite') -class ZlibError extends Error { - constructor (err) { - super('zlib: ' + err.message) - this.code = err.code - this.errno = err.errno - /* istanbul ignore if */ - if (!this.code) - this.code = 'ZLIB_ERROR' +var destroyImpl = __nccwpck_require__(3701); - this.message = 'zlib: ' + err.message - Error.captureStackTrace(this, this.constructor) - } +util.inherits(Writable, Stream); - get name () { - return 'ZlibError' - } -} +function nop() {} -// the Zlib class they all inherit from -// This thing manages the queue of requests, and returns -// true or false if there is anything in the queue when -// you call the .write() method. -const _opts = Symbol('opts') -const _flushFlag = Symbol('flushFlag') -const _finishFlushFlag = Symbol('finishFlushFlag') -const _fullFlushFlag = Symbol('fullFlushFlag') -const _handle = Symbol('handle') -const _onError = Symbol('onError') -const _sawError = Symbol('sawError') -const _level = Symbol('level') -const _strategy = Symbol('strategy') -const _ended = Symbol('ended') -const _defaultFullFlush = Symbol('_defaultFullFlush') +function WritableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(540); -class ZlibBase extends Minipass { - constructor (opts, mode) { - if (!opts || typeof opts !== 'object') - throw new TypeError('invalid options for ZlibBase constructor') + options = options || {}; - super(opts) - this[_sawError] = false - this[_ended] = false - this[_opts] = opts + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; - this[_flushFlag] = opts.flush - this[_finishFlushFlag] = opts.finishFlush - // this will throw if any options are invalid for the class selected - try { - this[_handle] = new realZlib[mode](opts) - } catch (er) { - // make sure that all errors get decorated properly - throw new ZlibError(er) - } + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; - this[_onError] = (err) => { - // no sense raising multiple errors, since we abort on the first one. 
- if (this[_sawError]) - return + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - this[_sawError] = true + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; - // there is no way to cleanly recover. - // continuing only obscures problems. - this.close() - this.emit('error', err) - } + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; - this[_handle].on('error', er => this[_onError](new ZlibError(er))) - this.once('end', () => this.close) - } + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); - close () { - if (this[_handle]) { - this[_handle].close() - this[_handle] = null - this.emit('close') - } - } + // if _final has been called + this.finalCalled = false; - reset () { - if (!this[_sawError]) { - assert(this[_handle], 'zlib binding closed') - return this[_handle].reset() - } - } + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; - flush (flushFlag) { - if (this.ended) - return + // has it been destroyed + this.destroyed = false; - if (typeof flushFlag !== 'number') - flushFlag = this[_fullFlushFlag] - this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })) - } + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; - end (chunk, encoding, cb) { - if (chunk) - this.write(chunk, encoding) - this.flush(this[_finishFlushFlag]) - this[_ended] = true - return super.end(null, null, cb) - } + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - get ended () { - return this[_ended] - } + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; - write (chunk, encoding, cb) { - // process the chunk using the sync process - // then super.write() all the outputted chunks - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' + // a flag to see when we're in the middle of a write. + this.writing = false; - if (typeof chunk === 'string') - chunk = Buffer.from(chunk, encoding) + // when true all writes will be buffered until .uncork() call + this.corked = 0; - if (this[_sawError]) - return - assert(this[_handle], 'zlib binding closed') - - // _processChunk tries to .close() the native handle after it's done, so we - // intercept that by temporarily making it a no-op. 
- const nativeHandle = this[_handle]._handle - const originalNativeClose = nativeHandle.close - nativeHandle.close = () => {} - const originalClose = this[_handle].close - this[_handle].close = () => {} - // It also calls `Buffer.concat()` at the end, which may be convenient - // for some, but which we are not interested in as it slows us down. - Buffer.concat = (args) => args - let result - try { - const flushFlag = typeof chunk[_flushFlag] === 'number' - ? chunk[_flushFlag] : this[_flushFlag] - result = this[_handle]._processChunk(chunk, flushFlag) - // if we don't throw, reset it back how it was - Buffer.concat = OriginalBufferConcat - } catch (err) { - // or if we do, put Buffer.concat() back before we emit error - // Error events call into user code, which may call Buffer.concat() - Buffer.concat = OriginalBufferConcat - this[_onError](new ZlibError(err)) - } finally { - if (this[_handle]) { - // Core zlib resets `_handle` to null after attempting to close the - // native handle. Our no-op handler prevented actual closure, but we - // need to restore the `._handle` property. - this[_handle]._handle = nativeHandle - nativeHandle.close = originalNativeClose - this[_handle].close = originalClose - // `_processChunk()` adds an 'error' listener. If we don't remove it - // after each call, these handlers start piling up. - this[_handle].removeAllListeners('error') - // make sure OUR error listener is still attached tho - } - } - - if (this[_handle]) - this[_handle].on('error', er => this[_onError](new ZlibError(er))) - - let writeReturn - if (result) { - if (Array.isArray(result) && result.length > 0) { - // The first buffer is always `handle._outBuffer`, which would be - // re-used for later invocations; so, we always have to copy that one. - writeReturn = this[_superWrite](Buffer.from(result[0])) - for (let i = 1; i < result.length; i++) { - writeReturn = this[_superWrite](result[i]) - } - } else { - writeReturn = this[_superWrite](Buffer.from(result)) - } - } + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; - if (cb) - cb() - return writeReturn - } + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; - [_superWrite] (data) { - return super.write(data) - } -} + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; -class Zlib extends ZlibBase { - constructor (opts, mode) { - opts = opts || {} + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; - opts.flush = opts.flush || constants.Z_NO_FLUSH - opts.finishFlush = opts.finishFlush || constants.Z_FINISH - super(opts, mode) + // the amount that is being written when _write is called. 
+ this.writelen = 0; - this[_fullFlushFlag] = constants.Z_FULL_FLUSH - this[_level] = opts.level - this[_strategy] = opts.strategy - } + this.bufferedRequest = null; + this.lastBufferedRequest = null; - params (level, strategy) { - if (this[_sawError]) - return + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; - if (!this[_handle]) - throw new Error('cannot switch params when binding is closed') + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; - // no way to test this without also not supporting params at all - /* istanbul ignore if */ - if (!this[_handle].params) - throw new Error('not supported in this implementation') - - if (this[_level] !== level || this[_strategy] !== strategy) { - this.flush(constants.Z_SYNC_FLUSH) - assert(this[_handle], 'zlib binding closed') - // .params() calls .flush(), but the latter is always async in the - // core zlib. We override .flush() temporarily to intercept that and - // flush synchronously. - const origFlush = this[_handle].flush - this[_handle].flush = (flushFlag, cb) => { - this.flush(flushFlag) - cb() - } - try { - this[_handle].params(level, strategy) - } finally { - this[_handle].flush = origFlush - } - /* istanbul ignore else */ - if (this[_handle]) { - this[_level] = level - this[_strategy] = strategy - } - } - } -} + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; -// minimal 2-byte header -class Deflate extends Zlib { - constructor (opts) { - super(opts, 'Deflate') - } -} + // count buffered requests + this.bufferedRequestCount = 0; -class Inflate extends Zlib { - constructor (opts) { - super(opts, 'Inflate') - } + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); } -// gzip - bigger header, same deflate compression -const _portable = Symbol('_portable') -class Gzip extends Zlib { - constructor (opts) { - super(opts, 'Gzip') - this[_portable] = opts && !!opts.portable +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; } + return out; +}; - [_superWrite] (data) { - if (!this[_portable]) - return super[_superWrite](data) +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); - // we'll always get the header emitted in one first chunk - // overwrite the OS indicator byte with 0xFF - this[_portable] = false - data[9] = 255 - return super[_superWrite](data) - } -} +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; -class Gunzip extends Zlib { - constructor (opts) { - super(opts, 'Gunzip') - } + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; } -// raw - no header -class DeflateRaw extends Zlib { - constructor (opts) { - super(opts, 'DeflateRaw') - } -} +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(540); -class InflateRaw extends Zlib { - constructor (opts) { - super(opts, 'InflateRaw') - } -} + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. -// auto-detect header. -class Unzip extends Zlib { - constructor (opts) { - super(opts, 'Unzip') + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); } -} -class Brotli extends ZlibBase { - constructor (opts, mode) { - opts = opts || {} + this._writableState = new WritableState(options, this); - opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS - opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH + // legacy. + this.writable = true; - super(opts, mode) + if (options) { + if (typeof options.write === 'function') this._write = options.write; - this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH - } -} + if (typeof options.writev === 'function') this._writev = options.writev; -class BrotliCompress extends Brotli { - constructor (opts) { - super(opts, 'BrotliCompress') - } -} + if (typeof options.destroy === 'function') this._destroy = options.destroy; -class BrotliDecompress extends Brotli { - constructor (opts) { - super(opts, 'BrotliDecompress') + if (typeof options.final === 'function') this._final = options.final; } -} -exports.Deflate = Deflate -exports.Inflate = Inflate -exports.Gzip = Gzip -exports.Gunzip = Gunzip -exports.DeflateRaw = DeflateRaw -exports.InflateRaw = InflateRaw -exports.Unzip = Unzip -/* istanbul ignore else */ -if (typeof realZlib.BrotliCompress === 'function') { - exports.BrotliCompress = BrotliCompress - exports.BrotliDecompress = BrotliDecompress -} else { - exports.BrotliCompress = exports.BrotliDecompress = class { - constructor () { - throw new Error('Brotli is not supported in this version of Node.js') - } - } + Stream.call(this); } +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; -/***/ }), - -/***/ 7557: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - -const proc = typeof process === 'object' && process ? 
process : { - stdout: null, - stderr: null, +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); } -const EE = __nccwpck_require__(2361) -const Stream = __nccwpck_require__(2781) -const SD = (__nccwpck_require__(1576).StringDecoder) - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' - -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) -class Pipe { - constructor (src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe () { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors () {} - end () { - this.unpipe() - if (this.opts.end) - this.dest.end() - } -} +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; -class PipeProxyErrors extends Pipe { - unpipe () { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); } - constructor (src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; } + return valid; } -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = [] - this.buffer = [] - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[ASYNC] = options && !!options.async || false - this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - } - - get bufferLength () { return this[BUFFERLENGTH] } - - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') - - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); - this[ENCODING] = enc + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); } - setEncoding (enc) { - this.encoding = enc + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; } - get objectMode () { return this[OBJECTMODE] } - set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } - - get ['async'] () { return this[ASYNC] } - set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } - - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } + if (typeof cb !== 'function') cb = nop; - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } - if (!encoding) - encoding = 'utf8' + return ret; +}; - const fn = this[ASYNC] ? 
defer : f => f() +Writable.prototype.cork = function () { + var state = this._writableState; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } + state.corked++; +}; - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? */ - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) +Writable.prototype.uncork = function () { + var state = this._writableState; - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) + if (state.corked) { + state.corked--; - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; - if (cb) - fn(cb) +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; - return this.flowing - } +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - fn(cb) - return this.flowing - } +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. 
+function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing } + var len = state.objectMode ? 1 : chunk.length; - read (n) { - if (this[DESTROYED]) - return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } + state.length += len; - if (this[OBJECTMODE]) - n = null + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = [this.buffer.join('')] - else - this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; } - - const ret = this[READ](n || null, this.buffer[0]) - this[MAYBE_EMIT_END]() - return ret + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); } - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer[0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) + return ret; +} - if (!this.buffer.length && !this[EOF]) - this.emit('drain') +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} - return chunk - } +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); } +} - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} - resume () { - return this[RESUME]() - } +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; - pause () { - this[FLOWING] = false - this[PAUSED] = true - } + onwriteStateUpdate(state); - get destroyed () { - return this[DESTROYED] - } + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); - get flowing () { - return this[FLOWING] - } + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } - get paused () { - return this[PAUSED] + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } } +} - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - this.buffer.push(chunk) - } +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer[0].length - } - return this.buffer.shift() +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); } +} - [FLUSH] (noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) - - if (!noDrain && !this.buffer.length && !this[EOF]) - this.emit('drain') - } +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; - [FLUSHCHUNK] (chunk) { - return chunk ? 
(this.emit('data', chunk), this.flowing) : false - } + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; - pipe (dest, opts) { - if (this[DESTROYED]) - return + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false - else - opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors + doWrite(stream, state, true, state.length, buffer, '', holder.finish); - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end() + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; } else { - this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)) - if (this[ASYNC]) - defer(() => this[RESUME]()) - else - this[RESUME]() + state.corkedRequestsFree = new CorkedRequest(state); } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; - return dest - } - - unpipe (dest) { - const p = this.pipes.find(p => p.dest === dest) - if (p) { - this.pipes.splice(this.pipes.indexOf(p), 1) - p.unpipe() + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. 
+ if (state.writing) { + break; + } } - } - - addListener (ev, fn) { - return this.on(ev, fn) - } - on (ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) - defer(() => fn.call(this, this[EMITTED_ERROR])) - else - fn.call(this, this[EMITTED_ERROR]) - } - return ret + if (entry === null) state.lastBufferedRequest = null; } - get emittedEnd () { - return this[EMITTED_END] - } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false - } - } +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; - emit (ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !data ? false - : this[ASYNC] ? defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - const ret = super.emit('error', data) - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } +Writable.prototype._writev = null; - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; - [EMITDATA] (data) { - for (const p of this.pipes) { - if (p.dest.write(data) === false) - this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; } - [EMITEND] () { - if (this[EMITTED_END]) - return + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) - defer(() => this[EMITEND2]()) - else - this[EMITEND2]() + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); } - [EMITEND2] () { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this.pipes) { - p.dest.write(data) - } - super.emit('data', data) - } - } + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); +}; - for (const p of this.pipes) { - p.end() +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret } +} - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } } + return need; +} - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength)) +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); } + state.ended = true; + stream.writable = false; +} - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; } - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) - return Promise.resolve({ done: true }) - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { next } - } + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; } - return { next } - } - - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; } - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer.length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() - - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) - - return this - } - - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; } -} +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; /***/ }), -/***/ 6186: +/***/ 5049: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const optsArg = __nccwpck_require__(2853) -const pathArg = __nccwpck_require__(2930) -const {mkdirpNative, mkdirpNativeSync} = __nccwpck_require__(4983) -const {mkdirpManual, mkdirpManualSync} = __nccwpck_require__(356) -const {useNative, useNativeSync} = __nccwpck_require__(4518) +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } -const mkdirp = (path, opts) => { - path = pathArg(path) - opts = optsArg(opts) - return useNative(opts) - ? mkdirpNative(path, opts) - : mkdirpManual(path, opts) -} +var Buffer = (__nccwpck_require__(8746).Buffer); +var util = __nccwpck_require__(3837); -const mkdirpSync = (path, opts) => { - path = pathArg(path) - opts = optsArg(opts) - return useNativeSync(opts) - ? 
mkdirpNativeSync(path, opts) - : mkdirpManualSync(path, opts) +function copyBuffer(src, target, offset) { + src.copy(target, offset); } -mkdirp.sync = mkdirpSync -mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts)) -mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts)) -mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts)) -mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts)) +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); -module.exports = mkdirp + this.head = null; + this.tail = null; + this.length = 0; + } + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; -/***/ }), + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; -/***/ 4992: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; -const {dirname} = __nccwpck_require__(1017) + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; -const findMade = (opts, parent, path = undefined) => { - // we never want the 'made' return value to be a root directory - if (path === parent) - return Promise.resolve() + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; - return opts.statAsync(parent).then( - st => st.isDirectory() ? path : undefined, // will fail later - er => er.code === 'ENOENT' - ? findMade(opts, dirname(parent), parent) - : undefined - ) -} + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; -const findMadeSync = (opts, parent, path = undefined) => { - if (path === parent) - return undefined + return BufferList; +}(); - try { - return opts.statSync(parent).isDirectory() ? path : undefined - } catch (er) { - return er.code === 'ENOENT' - ? 
findMadeSync(opts, dirname(parent), parent) - : undefined - } +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; } -module.exports = {findMade, findMadeSync} - - /***/ }), -/***/ 356: +/***/ 3701: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const {dirname} = __nccwpck_require__(1017) - -const mkdirpManual = (path, opts, made) => { - opts.recursive = false - const parent = dirname(path) - if (parent === path) { - return opts.mkdirAsync(path, opts).catch(er => { - // swallowed by recursive implementation on posix systems - // any other error is a failure - if (er.code !== 'EISDIR') - throw er - }) - } - - return opts.mkdirAsync(path, opts).then(() => made || path, er => { - if (er.code === 'ENOENT') - return mkdirpManual(parent, opts) - .then(made => mkdirpManual(path, opts, made)) - if (er.code !== 'EEXIST' && er.code !== 'EROFS') - throw er - return opts.statAsync(path).then(st => { - if (st.isDirectory()) - return made - else - throw er - }, () => { throw er }) - }) -} - -const mkdirpManualSync = (path, opts, made) => { - const parent = dirname(path) - opts.recursive = false - - if (parent === path) { - try { - return opts.mkdirSync(path, opts) - } catch (er) { - // swallowed by recursive implementation on posix systems - // any other error is a failure - if (er.code !== 'EISDIR') - throw er - else - return - } - } - - try { - opts.mkdirSync(path, opts) - return made || path - } catch (er) { - if (er.code === 'ENOENT') - return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made)) - if (er.code !== 'EEXIST' && er.code !== 'EROFS') - throw er - try { - if (!opts.statSync(path).isDirectory()) - throw er - } catch (_) { - throw er - } - } -} -module.exports = {mkdirpManual, mkdirpManualSync} +/**/ -/***/ }), +var pna = __nccwpck_require__(5371); +/**/ -/***/ 4983: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; -const {dirname} = __nccwpck_require__(1017) -const {findMade, findMadeSync} = __nccwpck_require__(4992) -const {mkdirpManual, mkdirpManualSync} = __nccwpck_require__(356) - -const mkdirpNative = (path, opts) => { - opts.recursive = true - const parent = dirname(path) - if (parent === path) - return opts.mkdirAsync(path, opts) - - return findMade(opts, path).then(made => - opts.mkdirAsync(path, opts).then(() => made) - .catch(er => { - if (er.code === 'ENOENT') - return mkdirpManual(path, opts) - else - throw er - })) -} + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; -const mkdirpNativeSync = (path, opts) => { - opts.recursive = true - const parent = dirname(path) - if (parent === path) - return opts.mkdirSync(path, opts) + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + pna.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, this, err); + } + } - const made = findMadeSync(opts, path) - try { - opts.mkdirSync(path, opts) - return made - } catch (er) { - if (er.code === 'ENOENT') - return mkdirpManualSync(path, opts) - else - throw er + return this; } -} - -module.exports 
= {mkdirpNative, mkdirpNativeSync} + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks -/***/ }), + if (this._readableState) { + this._readableState.destroyed = true; + } -/***/ 2853: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } -const { promisify } = __nccwpck_require__(3837) -const fs = __nccwpck_require__(7147) -const optsArg = opts => { - if (!opts) - opts = { mode: 0o777, fs } - else if (typeof opts === 'object') - opts = { mode: 0o777, fs, ...opts } - else if (typeof opts === 'number') - opts = { mode: opts, fs } - else if (typeof opts === 'string') - opts = { mode: parseInt(opts, 8), fs } - else - throw new TypeError('invalid options argument') + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + pna.nextTick(emitErrorNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, _this, err); + } + } else if (cb) { + cb(err); + } + }); - opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir - opts.mkdirAsync = promisify(opts.mkdir) - opts.stat = opts.stat || opts.fs.stat || fs.stat - opts.statAsync = promisify(opts.stat) - opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync - opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync - return opts + return this; } -module.exports = optsArg - - -/***/ }), - -/***/ 2930: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform -const { resolve, parse } = __nccwpck_require__(1017) -const pathArg = path => { - if (/\0/.test(path)) { - // simulate same failure that node raises - throw Object.assign( - new TypeError('path must be a string without null bytes'), - { - path, - code: 'ERR_INVALID_ARG_VALUE', - } - ) +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; } - path = resolve(path) - if (platform === 'win32') { - const badWinChars = /[*|"<>?:]/ - const {root} = parse(path) - if (badWinChars.test(path.substr(root.length))) { - throw Object.assign(new Error('Illegal characters in path.'), { - path, - code: 'EINVAL', - }) - } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; } +} - return path +function emitErrorNT(self, err) { + self.emit('error', err); } -module.exports = pathArg +module.exports = { + destroy: destroy, + undestroy: undestroy +}; /***/ }), -/***/ 4518: +/***/ 6009: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const fs = __nccwpck_require__(7147) - -const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version -const versArr = version.replace(/^v/, '').split('.') -const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12 - -const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir -const useNativeSync = !hasNative ? 
() => false : opts => opts.mkdirSync === fs.mkdirSync - -module.exports = {useNative, useNativeSync} +module.exports = __nccwpck_require__(2781); /***/ }), -/***/ 467: +/***/ 9025: /***/ ((module, exports, __nccwpck_require__) => { - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var Stream = _interopDefault(__nccwpck_require__(2781)); -var http = _interopDefault(__nccwpck_require__(3685)); -var Url = _interopDefault(__nccwpck_require__(7310)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(5687)); -var zlib = _interopDefault(__nccwpck_require__(9796)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - 
enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); +var Stream = __nccwpck_require__(2781); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(9631); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(4840); + exports.Duplex = __nccwpck_require__(540); + exports.Transform = __nccwpck_require__(9873); + exports.PassThrough = __nccwpck_require__(1279); } -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = (__nccwpck_require__(2877).convert); -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; +/***/ }), -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); +/***/ 8746: +/***/ ((module, exports, __nccwpck_require__) => { -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(4300) +var Buffer = buffer.Buffer -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } } - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / { -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } -} +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency 
+class Comparator { + static get ANY () { + return ANY + } -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} + constructor (comp, options) { + options = parseOptions(options) -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. - * - * @param String name Header name - * @return String|Undefined - */ -function find(map, name) { - name = name.toLowerCase(); - for (const key in map) { - if (key.toLowerCase() === name) { - return key; - } - } - return undefined; -} + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); + debug('comp', this) + } -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } - return obj; -} + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; + // if it literally is just '>' or '' then allow anything. + if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); + toString () { + return this.value + } -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); + test (version) { + debug('Comparator.test', version, this.options.loose) -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + if (this.semver === ANY || version === ANY) { + return true + } -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + return cmp(version, this.operator, this.semver, this.options) + } -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + options = parseOptions(options) -/** - * Check if a value is an instance of Request. 
- * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && comp.operator.startsWith('>')) { + return true + } + return false + } } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? 
init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); +module.exports = Comparator -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); +const parseOptions = __nccwpck_require__(3526) +const { safeRe: re, t } = __nccwpck_require__(9274) +const cmp = __nccwpck_require__(1001) +const debug = __nccwpck_require__(3910) +const SemVer = __nccwpck_require__(2749) +const Range = __nccwpck_require__(9007) -/** - * Convert a Request to Node.js http request options. - * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ +/***/ }), -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); +/***/ 9007: +/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { - this.type = 'aborted'; - this.message = message; +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } -const URL$1 = Url.URL || whatwgUrl.URL; + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) - return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); -}; + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } -/** - * isSameProtocol reports whether the two provided URLs use the same protocol. - * - * Both domains must already be in canonical form. - * @param {string|URL} original - * @param {string|URL} destination - */ -const isSameProtocol = function isSameProtocol(destination, original) { - const orig = new URL$1(original).protocol; - const dest = new URL$1(destination).protocol; + // if we have any that are not the null set, throw out null sets. 
+ if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } - return orig === dest; -}; + this.format() + } -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - destroyStream(request.body, error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - - if (response && response.body) { - destroyStream(response.body, err); - } - - finalize(); - }); - - fixResponseChunkedTransferBadEnding(req, function (err) { - if (signal && signal.aborted) { - return; - } - - if (response && response.body) { - destroyStream(response.body, err); - } - }); - - /* c8 ignore next 18 */ - if (parseInt(process.version.substring(1)) < 14) { - // Before Node.js 14, pipeline() does not fully support async iterators and does not always - // properly handle when the socket close/end events are out of order. 
- req.on('socket', function (s) { - s.addListener('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = s.listenerCount('data') > 0; - - // if end happened before close but the socket didn't emit an error, do it now - if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - response.body.emit('error', err); - } - }); - }); - } - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; - - if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - raw.on('end', function () { - // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
- if (!response) { - response = new Response(body, response_options); - resolve(response); - } - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -function fixResponseChunkedTransferBadEnding(request, errorCallback) { - let socket; - - request.on('socket', function (s) { - socket = s; - }); - - request.on('response', function (response) { - const headers = response.headers; - - if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { - response.once('close', function (hadError) { - // tests for socket presence, as in some situations the - // the 'socket' event is not triggered for the request - // (happens in deno), avoids `TypeError` - // if a data listener is still present we didn't end cleanly - const hasDataListener = socket && socket.listenerCount('data') > 0; - - if (hasDataListener && !hadError) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - errorCallback(err); - } - }); - } - }); -} - -function destroyStream(stream, err) { - if (stream.destroy) { - stream.destroy(err); - } else { - // node < 8 - stream.emit('error', err); - stream.end(); - } -} + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim() + return this.range + } -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; + toString () { + return this.range + } -// expose Promise -fetch.Promise = global.Promise; + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports["default"] = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; -exports.AbortError = AbortError; + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) -/***/ }), + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
-once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } -/***/ }), - -/***/ 8714: -/***/ ((module) => { + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } -function posix(path) { - return path.charAt(0) === '/'; + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } } -function win32(path) { - // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 - var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - var result = splitDeviceRe.exec(path); - var device = result[1] || ''; - var isUnc = Boolean(device && device.charAt(1) !== ':'); - - // UNC paths are always absolute - return Boolean(result[2] || isUnc); -} +module.exports = Range -module.exports = process.platform === 'win32' ? 
win32 : posix; -module.exports.posix = posix; -module.exports.win32 = win32; +const LRU = __nccwpck_require__(2135) +const cache = new LRU() +const parseOptions = __nccwpck_require__(3526) +const Comparator = __nccwpck_require__(2638) +const debug = __nccwpck_require__(3910) +const SemVer = __nccwpck_require__(2749) +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = __nccwpck_require__(9274) +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = __nccwpck_require__(6489) -/***/ }), +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' -/***/ 7810: -/***/ ((module) => { +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + testComparator = remainingComparators.pop() + } -if (typeof process === 'undefined' || - !process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = { nextTick: nextTick }; -} else { - module.exports = process + return result } -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { - throw new TypeError('"callback" argument must be a function'); - } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); - }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; - } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp } +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} -/***/ }), - -/***/ 1359: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. + debug('tilde return', ret) + return ret + }) +} +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? 
'-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret -/**/ + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } -var pna = __nccwpck_require__(7810); -/**/ + debug('caret return', ret) + return ret + }) +} -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} -module.exports = Duplex; +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp -/**/ -var util = Object.create(__nccwpck_require__(5898)); -util.inherits = __nccwpck_require__(4124); -/**/ + if (gtlt === '=' && anyX) { + gtlt = '' + } -var Readable = __nccwpck_require__(1433); -var Writable = __nccwpck_require__(6993); + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' -util.inherits(Duplex, Readable); + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 -{ - // avoid scope creep, the keys array can then be collected - var keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } -} + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. 
+ gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); + if (gtlt === '<') { + pr = '-0' + } - Readable.call(this, options); - Writable.call(this, options); + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } - if (options && options.readable === false) this.readable = false; + debug('xRange return', ret) - if (options && options.writable === false) this.writable = false; + return ret + }) +} - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} - this.once('end', onend); +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') } -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +// TODO build? +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` } -}); - -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - // no more data can be written. - // But allow more writes to happen in this tick. 
- pna.nextTick(onEndNT, this); -} + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } -function onEndNT(self) { - self.end(); + return `${from} ${to}`.trim() } -Object.defineProperty(Duplex.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined || this._writableState === undefined) { - return false; +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } } - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; + // Version has a -pre, but it's not one of the ones we like. + return false } -}); -Duplex.prototype._destroy = function (err, cb) { - this.push(null); - this.end(); + return true +} - pna.nextTick(cb, err); -}; /***/ }), -/***/ 1542: +/***/ 2749: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. +const debug = __nccwpck_require__(3910) +const { MAX_LENGTH, MAX_SAFE_INTEGER } = __nccwpck_require__(6489) +const { safeRe: re, t } = __nccwpck_require__(9274) -// a passthrough stream. -// basically just the most minimal sort of Transform stream. 
-// Every written chunk gets output as-is. +const parseOptions = __nccwpck_require__(3526) +const { compareIdentifiers } = __nccwpck_require__(7704) +class SemVer { + constructor (version, options) { + options = parseOptions(options) + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } -module.exports = PassThrough; + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease -var Transform = __nccwpck_require__(4415); + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) -/**/ -var util = Object.create(__nccwpck_require__(5898)); -util.inherits = __nccwpck_require__(4124); -/**/ + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } -util.inherits(PassThrough, Transform); + this.raw = version -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] - Transform.call(this, options); -} + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } -/***/ }), + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } -/***/ 1433: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + toString () { + return this.version + } -/**/ + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } -var pna = __nccwpck_require__(7810); -/**/ + if (other.version === this.version) { + return 0 + } -module.exports = Readable; + return this.compareMain(other) || this.comparePre(other) + } -/**/ -var isArray = __nccwpck_require__(893); -/**/ + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -/**/ -var Duplex; -/**/ + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } -Readable.ReadableState = ReadableState; + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -/**/ -var EE = (__nccwpck_require__(2361).EventEmitter); + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } -/**/ -var Stream = __nccwpck_require__(2387); -/**/ + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -/**/ + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('build compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } -var Buffer = (__nccwpck_require__(1867).Buffer); -var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. 
+ inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break -/**/ + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 
1 : 0 -/**/ -var util = Object.create(__nccwpck_require__(5898)); -util.inherits = __nccwpck_require__(4124); -/**/ + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } -/**/ -var debugUtil = __nccwpck_require__(3837); -var debug = void 0; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } } -/**/ -var BufferList = __nccwpck_require__(7053); -var destroyImpl = __nccwpck_require__(7049); -var StringDecoder; +module.exports = SemVer -util.inherits(Readable, Stream); -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +/***/ }), -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); +/***/ 1965: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +const parse = __nccwpck_require__(9180) +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null } +module.exports = clean -function ReadableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(1359); - - options = options || {}; - - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; - - // object stream flag. 
Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; +/***/ }), - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var readableHwm = options.readableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; +/***/ 1001: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; +const eq = __nccwpck_require__(5154) +const neq = __nccwpck_require__(4056) +const gt = __nccwpck_require__(6989) +const gte = __nccwpck_require__(2076) +const lt = __nccwpck_require__(6269) +const lte = __nccwpck_require__(2133) - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; + case '': + case '=': + case '==': + return eq(a, b, loose) - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; + case '!=': + return neq(a, b, loose) - // has it been destroyed - this.destroyed = false; + case '>': + return gt(a, b, loose) - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- this.defaultEncoding = options.defaultEncoding || 'utf8'; + case '>=': + return gte(a, b, loose) - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; + case '<': + return lt(a, b, loose) - // if true, a maybeReadMore has been scheduled - this.readingMore = false; + case '<=': + return lte(a, b, loose) - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; + default: + throw new TypeError(`Invalid operator: ${op}`) } } +module.exports = cmp -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - - if (!(this instanceof Readable)) return new Readable(options); - this._readableState = new ReadableState(options, this); +/***/ }), - // legacy - this.readable = true; +/***/ 6401: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (options) { - if (typeof options.read === 'function') this._read = options.read; +const SemVer = __nccwpck_require__(2749) +const parse = __nccwpck_require__(9180) +const { safeRe: re, t } = __nccwpck_require__(9274) - if (typeof options.destroy === 'function') this._destroy = options.destroy; +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version } - Stream.call(this); -} - -Object.defineProperty(Readable.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } - - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; + if (typeof version === 'number') { + version = String(version) } -}); - -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - this.push(null); - cb(err); -}; - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; + if (typeof version !== 'string') { + return null } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; + options = options || {} -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? 
re[t.COERCEFULL] : re[t.COERCE]) } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (addToFront) { - if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); - } else if (state.ended) { - stream.emit('error', new Error('stream.push() after EOF')); - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next } - } else if (!addToFront) { - state.reading = false; + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 } - return needMoreData(state); -} - -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); + if (match === null) { + return null } - maybeReadMore(stream, state); -} -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? `+${match[6]}` : '' -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. 
-function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) } +module.exports = coerce -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; +/***/ }), -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} +/***/ 7477: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; - } - return state.length; +const SemVer = __nccwpck_require__(2749) +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) } +module.exports = compareBuild -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; +/***/ }), - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } +/***/ 9982: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - n = howMuchToRead(n, state); +const compare = __nccwpck_require__(439) +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. 
Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. +/***/ }), - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); +/***/ 439: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } +const SemVer = __nccwpck_require__(2749) +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); - } +module.exports = compare - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - if (ret === null) { - state.needReadable = true; - n = 0; - } else { - state.length -= n; - } +/***/ }), - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; +/***/ 6375: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); +const parse = __nccwpck_require__(9180) + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null } - if (ret !== null) this.emit('data', ret); + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length - return ret; -}; + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 
1 : chunk.length; + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' } - } - state.ended = true; - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} + // Otherwise it can be determined by checking the high version -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' } -} -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 'pre' : '' -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - pna.nextTick(maybeReadMore_, stream, state); + if (v1.major !== v2.major) { + return prefix + 'major' } -} -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; + if (v1.minor !== v2.minor) { + return prefix + 'minor' } - state.readingMore = false; + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' } -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('_read() is not implemented')); -}; +module.exports = diff -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); +/***/ }), - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; +/***/ 5154: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var endFn = doEnd ? 
onend : unpipe; - if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); +const compare = __nccwpck_require__(439) +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - function onend() { - debug('onend'); - dest.end(); - } +/***/ }), - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); +/***/ 6989: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); +const compare = __nccwpck_require__(439) +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt - cleanedUp = true; - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } +/***/ }), - // If the user pushes more data while we're writing to dest then we'll end up - // in ondata again. However, we only want to increase awaitDrain once because - // dest will only emit one 'drain' event for the multiple writes. - // => Introduce a guard on increasing awaitDrain. - var increasedAwaitDrain = false; - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - increasedAwaitDrain = false; - var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; - increasedAwaitDrain = true; - } - src.pause(); - } - } +/***/ 2076: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } +const compare = __nccwpck_require__(439) +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); - // Both close and finish should trigger unpipe, but only once. 
- function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); +/***/ }), + +/***/ 8009: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(2749) + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined } - dest.once('finish', onfinish); - function unpipe() { - debug('unpipe'); - src.unpipe(dest); + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null } +} +module.exports = inc - // tell the dest that it's being piped to - dest.emit('pipe', src); - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } +/***/ }), - return dest; -}; +/***/ 6269: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} +const compare = __nccwpck_require__(439) +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { hasUnpiped: false }; - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; +/***/ }), - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; +/***/ 2133: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!dest) dest = state.pipes; +const compare = __nccwpck_require__(439) +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } - // slow case. multiple pipe destinations. +/***/ }), - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; +/***/ 6635: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { hasUnpiped: false }); - }return this; - } +const SemVer = __nccwpck_require__(2749) +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major - // try to find the right one. 
- var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; +/***/ }), - dest.emit('unpipe', this, unpipeInfo); +/***/ 9014: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return this; -}; +const SemVer = __nccwpck_require__(2749) +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - if (ev === 'data') { - // Start flowing on next tick if stream isn't explicitly paused - if (this._readableState.flowing !== false) this.resume(); - } else if (ev === 'readable') { - var state = this._readableState; - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.emittedReadable = false; - if (!state.reading) { - pna.nextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this); - } - } - } +/***/ }), - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; +/***/ 4056: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} +const compare = __nccwpck_require__(439) +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - pna.nextTick(resume_, stream, state); +/***/ }), + +/***/ 9180: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(2749) +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er } } -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } +module.exports = parse - state.resumeScheduled = false; - state.awaitDrain = 0; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; +/***/ }), -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null) {} +/***/ 4609: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(2749) +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch + + +/***/ }), + +/***/ 6788: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const parse = __nccwpck_require__(9180) +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null } +module.exports = prerelease -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var _this = this; - var state = this._readableState; - var paused = false; +/***/ }), - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } +/***/ 7400: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - _this.push(null); - }); +const compare = __nccwpck_require__(439) +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; +/***/ }), - var ret = _this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); +/***/ 8035: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } +const compareBuild = __nccwpck_require__(7477) +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort - // proxy certain important events. - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } - // when we try to consume some more bytes, simply unpause the - // underlying stream. - this._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; +/***/ }), - return this; -}; +/***/ 7688: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._readableState.highWaterMark; +const Range = __nccwpck_require__(9007) +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false } -}); + return range.test(version) +} +module.exports = satisfies -// exposed for testing purposes only. -Readable._fromList = fromList; -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. 
-function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; +/***/ }), - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); - } +/***/ 4299: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return ret; -} +const compareBuild = __nccwpck_require__(7477) +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); - } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); - } - return ret; -} -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} +/***/ }), -// Copies a specified amount of bytes from the list of buffered data chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - var ret = Buffer.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; +/***/ 7617: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const parse = __nccwpck_require__(9180) +const valid = (version, options) => { + const v = parse(version, options) + return v ? v.version : null } +module.exports = valid -function endReadable(stream) { - var state = stream._readableState; - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. 
- if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); +/***/ }), - if (!state.endEmitted) { - state.ended = true; - pna.nextTick(endReadableNT, state, stream); - } -} +/***/ 8033: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } +// just pre-load all the stuff that index.js lazily exports +const internalRe = __nccwpck_require__(9274) +const constants = __nccwpck_require__(6489) +const SemVer = __nccwpck_require__(2749) +const identifiers = __nccwpck_require__(7704) +const parse = __nccwpck_require__(9180) +const valid = __nccwpck_require__(7617) +const clean = __nccwpck_require__(1965) +const inc = __nccwpck_require__(8009) +const diff = __nccwpck_require__(6375) +const major = __nccwpck_require__(6635) +const minor = __nccwpck_require__(9014) +const patch = __nccwpck_require__(4609) +const prerelease = __nccwpck_require__(6788) +const compare = __nccwpck_require__(439) +const rcompare = __nccwpck_require__(7400) +const compareLoose = __nccwpck_require__(9982) +const compareBuild = __nccwpck_require__(7477) +const sort = __nccwpck_require__(4299) +const rsort = __nccwpck_require__(8035) +const gt = __nccwpck_require__(6989) +const lt = __nccwpck_require__(6269) +const eq = __nccwpck_require__(5154) +const neq = __nccwpck_require__(4056) +const gte = __nccwpck_require__(2076) +const lte = __nccwpck_require__(2133) +const cmp = __nccwpck_require__(1001) +const coerce = __nccwpck_require__(6401) +const Comparator = __nccwpck_require__(2638) +const Range = __nccwpck_require__(9007) +const satisfies = __nccwpck_require__(7688) +const toComparators = __nccwpck_require__(1659) +const maxSatisfying = __nccwpck_require__(6233) +const minSatisfying = __nccwpck_require__(9683) +const minVersion = __nccwpck_require__(1661) +const validRange = __nccwpck_require__(3666) +const outside = __nccwpck_require__(2358) +const gtr = __nccwpck_require__(5098) +const ltr = __nccwpck_require__(2330) +const intersects = __nccwpck_require__(5802) +const simplifyRange = __nccwpck_require__(7918) +const subset = __nccwpck_require__(4805) +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, } -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} /***/ }), -/***/ 4415: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6489: +/***/ ((module) => { -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. 
-// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 -module.exports = Transform; +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] -var Duplex = __nccwpck_require__(1359); +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} -/**/ -var util = Object.create(__nccwpck_require__(5898)); -util.inherits = __nccwpck_require__(4124); -/**/ -util.inherits(Transform, Duplex); +/***/ }), -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; +/***/ 3910: +/***/ ((module) => { - var cb = ts.writecb; +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} - if (!cb) { - return this.emit('error', new Error('write callback called multiple times')); - } +module.exports = debug - ts.writechunk = null; - ts.writecb = null; - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); +/***/ }), - cb(er); +/***/ 7704: +/***/ ((module) => { - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 } -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) - Duplex.call(this, options); +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; +/***/ }), - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; +/***/ 2135: +/***/ ((module) => { - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; +class LRUCache { + constructor () { + this.max = 1000 + this.map = new Map() + } - if (typeof options.flush === 'function') this._flush = options.flush; + get (key) { + const value = this.map.get(key) + if (value === undefined) { + return undefined + } else { + // Remove the key from the map and add it to the end + this.map.delete(key) + this.map.set(key, value) + return value + } } - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); -} + delete (key) { + return this.map.delete(key) + } -function prefinish() { - var _this = this; + set (key, value) { + const deleted = this.delete(key) - if (typeof this._flush === 'function') { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); + if (!deleted && value !== undefined) { + // If cache is full, delete the least recently used item + if (this.map.size >= this.max) { + const firstKey = this.map.keys().next().value + this.delete(firstKey) + } + + this.map.set(key, value) + } + + return this } } -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; +module.exports = LRUCache -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('_transform() is not implemented'); -}; -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; +/***/ }), -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; +/***/ 3526: +/***/ ((module) => { - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. 
- ts.needTransform = true; +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts } -}; -Transform.prototype._destroy = function (err, cb) { - var _this2 = this; + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions + + +/***/ }), + +/***/ 9274: +/***/ ((module, exports, __nccwpck_require__) => { - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - _this2.emit('close'); - }); -}; +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = __nccwpck_require__(6489) +const debug = __nccwpck_require__(3910) +exports = module.exports = {} -function done(stream, er, data) { - if (er) return stream.emit('error', er); +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] - if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} - return stream.push(null); +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) } -/***/ }), +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. -/***/ 6993: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) +// ## Main Version +// Three dot-separated numeric identifiers. -/**/ +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) -var pna = __nccwpck_require__(7810); -/**/ +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) -module.exports = Writable; +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) - this.next = null; - this.entry = null; - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; -/**/ +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) -/**/ -var Duplex; -/**/ +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) -Writable.WritableState = WritableState; +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. 
-/**/ -var util = Object.create(__nccwpck_require__(5898)); -util.inherits = __nccwpck_require__(4124); -/**/ +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) -/**/ -var internalUtil = { - deprecate: __nccwpck_require__(5278) -}; -/**/ +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. -/**/ -var Stream = __nccwpck_require__(2387); -/**/ +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) -/**/ +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. -var Buffer = (__nccwpck_require__(1867).Buffer); -var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. -/**/ +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) -var destroyImpl = __nccwpck_require__(7049); +createToken('FULL', `^${src[t.FULLPLAIN]}$`) -util.inherits(Writable, Stream); +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) -function nop() {} +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) -function WritableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(1359); +createToken('GTLT', '((?:<|>)?=?)') - options = options || {}; +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) - // object stream flag to indicate whether or not this stream - // contains buffers or objects. 
- this.objectMode = !!options.objectMode; +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var writableHwm = options.writableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' - // if _final has been called - this.finalCalled = false; +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; +// Caret ranges. +// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') - // has it been destroyed - this.destroyed = false; +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- this.defaultEncoding = options.defaultEncoding || 'utf8'; +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' - // a flag to see when we're in the middle of a write. - this.writing = false; +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) - // when true all writes will be buffered until .uncork() call - this.corked = 0; +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; +/***/ }), - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; +/***/ 5098: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // the amount that is being written when _write is called. - this.writelen = 0; +// Determine if version is greater than all the versions possible in the range. 
+const outside = __nccwpck_require__(2358) +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr - this.bufferedRequest = null; - this.lastBufferedRequest = null; - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; +/***/ }), - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; +/***/ 5802: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; +const Range = __nccwpck_require__(9007) +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects - // count buffered requests - this.bufferedRequestCount = 0; - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} +/***/ }), -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; +/***/ 2330: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); +const outside = __nccwpck_require__(2358) +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. -var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function (object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; +/***/ }), + +/***/ 6233: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(2749) +const Range = __nccwpck_require__(9007) + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } } - }); -} else { - realHasInstance = function (object) { - return object instanceof this; - }; + }) + return max } +module.exports = maxSatisfying -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. 
+/***/ }), - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { - return new Writable(options); +/***/ 9683: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const SemVer = __nccwpck_require__(2749) +const Range = __nccwpck_require__(9007) +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying - this._writableState = new WritableState(options, this); - // legacy. - this.writable = true; +/***/ }), - if (options) { - if (typeof options.write === 'function') this._write = options.write; +/***/ 1661: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (typeof options.writev === 'function') this._writev = options.writev; +const SemVer = __nccwpck_require__(2749) +const Range = __nccwpck_require__(9007) +const gt = __nccwpck_require__(6989) - if (typeof options.destroy === 'function') this._destroy = options.destroy; +const minVersion = (range, loose) => { + range = new Range(range, loose) - if (typeof options.final === 'function') this._final = options.final; + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver } - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe, not readable')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - pna.nextTick(cb, er); -} + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. -function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] - if (chunk === null) { - er = new TypeError('May not write null values to stream'); - } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. 
+ const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } } - if (er) { - stream.emit('error', er); - pna.nextTick(cb, er); - valid = false; + + if (minver && range.test(minver)) { + return minver } - return valid; + + return null } +module.exports = minVersion -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } +/***/ }), - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } +/***/ 2358: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; +const SemVer = __nccwpck_require__(2749) +const Comparator = __nccwpck_require__(2638) +const { ANY } = Comparator +const Range = __nccwpck_require__(9007) +const satisfies = __nccwpck_require__(7688) +const gt = __nccwpck_require__(6989) +const lt = __nccwpck_require__(6269) +const lte = __nccwpck_require__(2133) +const gte = __nccwpck_require__(2076) - if (typeof cb !== 'function') cb = nop; +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) - if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') } - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } - state.corked++; -}; + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. -Writable.prototype.uncork = function () { - var state = this._writableState; + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] - if (state.corked) { - state.corked--; + let high = null + let low = null - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } } - return chunk; + return true } -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; - } -}); +module.exports = outside -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } - } - var len = state.objectMode ? 1 : chunk.length; - state.length += len; +/***/ }), - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. - if (!ret) state.needDrain = true; +/***/ 7918: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
+const satisfies = __nccwpck_require__(7688) +const compare = __nccwpck_require__(439) +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } } else { - state.bufferedRequest = state.lastBufferedRequest; + if (prev) { + set.push([first, prev]) + } + prev = null + first = null } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); + } + if (first) { + set.push([first, null]) } - return ret; + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? simplified : range } -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; +/***/ }), - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - pna.nextTick(cb, er); - // this can emit finish, and it will always happen - // after error - pna.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - // this can emit finish, but finish must - // always follow error - finishMaybe(stream, state); - } -} +/***/ 4805: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} +const Range = __nccwpck_require__(9007) +const Comparator = __nccwpck_require__(2638) +const { ANY } = Comparator +const satisfies = __nccwpck_require__(7688) +const compare = __nccwpck_require__(439) -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver 
> LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true - onwriteStateUpdate(state); +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } } - - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false } } + return true } -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. 
-function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion } - buffer.allBuffers = allBuffers; - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); + } - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true } else { - state.corkedRequestsFree = new CorkedRequest(state); + dom = minimumVersion } - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; + } - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. 
- if (state.writing) { - break; - } + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) } + } - if (entry === null) state.lastBufferedRequest = null; + if (eqSet.size > 1) { + return null } - state.bufferedRequest = entry; - state.bufferProcessing = false; -} + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('_write() is not implemented')); -}; + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } -Writable.prototype._writev = null; + if (lt && !satisfies(eq, String(lt), options)) { + return null + } -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + return true } - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false } - // ignore unnecessary end() calls. 
- if (!state.ending) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - if (err) { - stream.emit('error', err); + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function') { - state.pendingcb++; - state.finalCalled = true; - pna.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false } } -} -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - } + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false } - return need; -} -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false } - state.ended = true; - stream.writable = false; -} -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false } - // reuse the free corkReq. 
- state.corkedRequestsFree.next = corkReq; + return true } -Object.defineProperty(Writable.prototype, 'destroyed', { - get: function () { - if (this._writableState === undefined) { - return false; - } - return this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b } -}); + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - this.end(); - cb(err); -}; /***/ }), -/***/ 7053: +/***/ 1659: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const Range = __nccwpck_require__(9007) +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +module.exports = toComparators -var Buffer = (__nccwpck_require__(1867).Buffer); -var util = __nccwpck_require__(3837); -function copyBuffer(src, target, offset) { - src.copy(target, offset); -} +/***/ }), -module.exports = function () { - function BufferList() { - _classCallCheck(this, BufferList); +/***/ 3666: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.head = null; - this.tail = null; - this.length = 0; +const Range = __nccwpck_require__(9007) +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null } - - BufferList.prototype.push = function push(v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - }; - - BufferList.prototype.unshift = function unshift(v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - }; - - BufferList.prototype.shift = function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - }; - - BufferList.prototype.clear = function clear() { - this.head = this.tail = null; - this.length = 0; - }; - - BufferList.prototype.join = function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) { - ret += s + p.data; - }return ret; - }; - - BufferList.prototype.concat = function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; - }; - - return BufferList; -}(); - -if (util && util.inspect && util.inspect.custom) { - module.exports.prototype[util.inspect.custom] = function () { - var obj = util.inspect({ length: this.length }); - return this.constructor.name + ' ' + obj; - }; } +module.exports = validRange + /***/ }), -/***/ 7049: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6461: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
/**/ -var pna = __nccwpck_require__(7810); +var Buffer = (__nccwpck_require__(8746).Buffer); /**/ -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - var _this = this; - - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - pna.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, this, err); - } +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; } - - return this; } +}; - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} - if (this._readableState) { - this._readableState.destroyed = true; +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.s = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; } + if (i < buf.length) return r ? 
r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - pna.nextTick(emitErrorNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, _this, err); - } - } else if (cb) { - cb(err); - } - }); +StringDecoder.prototype.end = utf8End; - return this; -} +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; } -function emitErrorNT(self, err) { - self.emit('error', err); +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. 
+function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; } -module.exports = { - destroy: destroy, - undestroy: undestroy -}; - -/***/ }), - -/***/ 2387: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(2781); - - -/***/ }), - -/***/ 1642: -/***/ ((module, exports, __nccwpck_require__) => { - -var Stream = __nccwpck_require__(2781); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream; - exports = module.exports = Stream.Readable; - exports.Readable = Stream.Readable; - exports.Writable = Stream.Writable; - exports.Duplex = Stream.Duplex; - exports.Transform = Stream.Transform; - exports.PassThrough = Stream.PassThrough; - exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(1433); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(6993); - exports.Duplex = __nccwpck_require__(1359); - exports.Transform = __nccwpck_require__(4415); - exports.PassThrough = __nccwpck_require__(1542); +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } } - -/***/ }), - -/***/ 4959: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = rimraf -rimraf.sync = rimrafSync - -var assert = __nccwpck_require__(9491) -var path = __nccwpck_require__(1017) -var fs = __nccwpck_require__(7147) -var glob = undefined -try { - glob = __nccwpck_require__(1957) -} catch (_err) { - // treat glob as optional. +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; } -var _0666 = parseInt('666', 8) -var defaultGlobOpts = { - nosort: true, - silent: true +// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); } -// for EMFILE handling -var timeout = 0 - -var isWindows = (process.platform === "win32") - -function defaults (options) { - var methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(function(m) { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} - options.maxBusyTries = options.maxBusyTries || 3 - options.emfileWait = options.emfileWait || 1000 - if (options.glob === false) { - options.disableGlob = true - } - if (options.disableGlob !== true && glob === undefined) { - throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; } - options.disableGlob = options.disableGlob || false - options.glob = options.glob || defaultGlobOpts + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); } -function rimraf (p, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); } + return r; +} - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert.equal(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} - var busyTries = 0 - var errState = null - var n = 0 +function base64End(buf) { + var r = buf && buf.length ? 
this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} - if (options.disableGlob || !glob.hasMagic(p)) - return afterGlob(null, [p]) +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} - options.lstat(p, function (er, stat) { - if (!er) - return afterGlob(null, [p]) +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} - glob(p, options.glob, afterGlob) - }) +/***/ }), - function next (er) { - errState = errState || er - if (--n === 0) - cb(errState) - } +/***/ 3449: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - function afterGlob (er, results) { - if (er) - return cb(er) +module.exports = __nccwpck_require__(2172); - n = results.length - if (n === 0) - return cb() - results.forEach(function (p) { - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && - busyTries < options.maxBusyTries) { - busyTries ++ - var time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(function () { - rimraf_(p, options, CB) - }, time) - } +/***/ }), - // this one won't happen if graceful-fs is used. - if (er.code === "EMFILE" && timeout < options.emfileWait) { - return setTimeout(function () { - rimraf_(p, options, CB) - }, timeout ++) - } +/***/ 2172: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // already gone - if (er.code === "ENOENT") er = null - } - timeout = 0 - next(er) - }) - }) - } -} -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, function (er, st) { - if (er && er.code === "ENOENT") - return cb(null) - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === "EPERM" && isWindows) - fixWinEPERM(p, options, er, cb) +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; - if (st && st.isDirectory()) - return rmdir(p, options, er, cb) - options.unlink(p, function (er) { - if (er) { - if (er.code === "ENOENT") - return cb(null) - if (er.code === "EPERM") - return (isWindows) - ? 
fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - if (er.code === "EISDIR") - return rmdir(p, options, er, cb) - } - return cb(er) - }) - }) +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; } -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - if (er) - assert(er instanceof Error) - - options.chmod(p, _0666, function (er2) { - if (er2) - cb(er2.code === "ENOENT" ? null : er) - else - options.stat(p, function(er3, stats) { - if (er3) - cb(er3.code === "ENOENT" ? null : er) - else if (stats.isDirectory()) - rmdir(p, options, er, cb) - else - options.unlink(p, cb) - }) - }) +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -function fixWinEPERMSync (p, options, er) { - assert(p) - assert(options) - if (er) - assert(er instanceof Error) - - try { - options.chmodSync(p, _0666) - } catch (er2) { - if (er2.code === "ENOENT") - return - else - throw er - } - - try { - var stats = options.statSync(p) - } catch (er3) { - if (er3.code === "ENOENT") - return - else - throw er - } - - if (stats.isDirectory()) - rmdirSync(p, options, er) - else - options.unlinkSync(p) +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; } -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - if (originalEr) - assert(originalEr instanceof Error) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, function (er) { - if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) - rmkids(p, options, cb) - else if (er && er.code === "ENOTDIR") - cb(originalEr) - else - cb(er) - }) +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -function rmkids(p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, function (er, files) { - if (er) - return cb(er) - var n = files.length - if (n === 0) - return options.rmdir(p, cb) - var errState - files.forEach(function (f) { - rimraf(path.join(p, f), options, function (er) { - if (errState) - return - if (er) - return cb(errState = er) - if (--n === 0) - options.rmdir(p, cb) - }) - }) - }) -} -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. 
-function rimrafSync (p, options) { - options = options || {} - defaults(options) +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.equal(typeof options, 'object', 'rimraf: options should be object') + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); - var results +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - if (options.disableGlob || !glob.hasMagic(p)) { - results = [p] - } else { - try { - options.lstatSync(p) - results = [p] - } catch (er) { - results = glob.sync(p, options.glob) - } + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; } - if (!results.length) - return - - for (var i = 0; i < results.length; i++) { - var p = results[i] + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); - try { - var st = options.lstatSync(p) - } catch (er) { - if (er.code === "ENOENT") - return + function onFree() { + self.emit('free', socket, options); + } - // Windows can EPERM on stat. Life is suffering. - if (er.code === "EPERM" && isWindows) - fixWinEPERMSync(p, options, er) + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); } + }); +}; - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) - rmdirSync(p, options, null) - else - options.unlinkSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "EPERM") - return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - if (er.code !== "EISDIR") - throw er +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); - rmdirSync(p, options, er) + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); } -} -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - if (originalEr) - assert(originalEr instanceof Error) + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "ENOTDIR") - throw originalEr - if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") - rmkidsSync(p, options) + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; } -} -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(function (f) { - rimrafSync(path.join(p, f), options) - }) + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - var retries = isWindows ? 
100 : 1 - var i = 0 - do { - var threw = true - try { - var ret = options.rmdirSync(p, options) - threw = false - return ret - } finally { - if (++i < retries && threw) - continue - } - } while (true) -} + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } -/***/ }), + function onError(cause) { + connectReq.removeAllListeners(); -/***/ 1867: -/***/ ((module, exports, __nccwpck_require__) => { + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; -/* eslint-disable node/no-deprecated-api */ -var buffer = __nccwpck_require__(4300) -var Buffer = buffer.Buffer +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} +}; -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); } -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; } - return Buffer(arg, encodingOrOffset, length) + return host; // for v0.11 or later } -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } } - } else { - buf.fill(0) } - return buf + return target; } -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return Buffer(size) -} -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); } - return buffer.SlowBuffer(size) +} else { + debug = function() {}; } +exports.debug = debug; // for test /***/ }), -/***/ 1532: +/***/ 9027: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const ANY = Symbol('SemVer ANY') -// hoisted class for cyclic dependency -class Comparator { - static get ANY () { - return ANY - } - constructor (comp, options) { - options = parseOptions(options) - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } +const Client = __nccwpck_require__(2802) +const Dispatcher = __nccwpck_require__(4809) +const errors = __nccwpck_require__(5465) +const Pool = __nccwpck_require__(408) +const BalancedPool = __nccwpck_require__(6399) +const Agent = __nccwpck_require__(916) +const util = __nccwpck_require__(825) +const { InvalidArgumentError } = errors +const api = __nccwpck_require__(3249) +const buildConnector = __nccwpck_require__(4451) +const MockClient = __nccwpck_require__(3903) +const MockAgent = __nccwpck_require__(8031) +const MockPool = __nccwpck_require__(8639) +const mockErrors = __nccwpck_require__(8043) +const ProxyAgent = __nccwpck_require__(171) +const RetryHandler = __nccwpck_require__(5814) +const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(8552) +const DecoratorHandler = __nccwpck_require__(9858) +const RedirectHandler = __nccwpck_require__(2517) +const createRedirectInterceptor = __nccwpck_require__(7357) - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', 
comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) +let hasCrypto +try { + __nccwpck_require__(6113) + hasCrypto = true +} catch { + hasCrypto = false +} - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } +Object.assign(Dispatcher.prototype, api) - debug('comp', this) - } +module.exports.Dispatcher = Dispatcher +module.exports.Client = Client +module.exports.Pool = Pool +module.exports.BalancedPool = BalancedPool +module.exports.Agent = Agent +module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler - parse (comp) { - const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] - const m = comp.match(r) +module.exports.DecoratorHandler = DecoratorHandler +module.exports.RedirectHandler = RedirectHandler +module.exports.createRedirectInterceptor = createRedirectInterceptor - if (!m) { - throw new TypeError(`Invalid comparator: ${comp}`) +module.exports.buildConnector = buildConnector +module.exports.errors = errors + +function makeDispatcher (fn) { + return (url, opts, handler) => { + if (typeof opts === 'function') { + handler = opts + opts = null } - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' + if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { + throw new InvalidArgumentError('invalid url') } - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) + if (opts != null && typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - } - toString () { - return this.value - } + if (opts && opts.path != null) { + if (typeof opts.path !== 'string') { + throw new InvalidArgumentError('invalid opts.path') + } - test (version) { - debug('Comparator.test', version, this.options.loose) + let path = opts.path + if (!opts.path.startsWith('/')) { + path = `/${path}` + } - if (this.semver === ANY || version === ANY) { - return true + url = new URL(util.parseOrigin(url).origin + path) + } else { + if (!opts) { + opts = typeof url === 'object' ? url : {} + } + + url = util.parseURL(url) } - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } + const { agent, dispatcher = getGlobalDispatcher() } = opts + + if (agent) { + throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') } - return cmp(version, this.operator, this.semver, this.options) + return fn.call(dispatcher, { + ...opts, + origin: url.origin, + path: url.search ? `${url.pathname}${url.search}` : url.pathname, + method: opts.method || (opts.body ? 
'PUT' : 'GET') + }, handler) } +} - intersects (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') +module.exports.setGlobalDispatcher = setGlobalDispatcher +module.exports.getGlobalDispatcher = getGlobalDispatcher + +if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { + let fetchImpl = null + module.exports.fetch = async function fetch (resource) { + if (!fetchImpl) { + fetchImpl = (__nccwpck_require__(2503).fetch) } - if (this.operator === '') { - if (this.value === '') { - return true - } - return new Range(comp.value, options).test(this.value) - } else if (comp.operator === '') { - if (comp.value === '') { - return true + try { + return await fetchImpl(...arguments) + } catch (err) { + if (typeof err === 'object') { + Error.captureStackTrace(err, this) } - return new Range(this.value, options).test(comp.semver) + + throw err } + } + module.exports.Headers = __nccwpck_require__(9535).Headers + module.exports.Response = __nccwpck_require__(8111).Response + module.exports.Request = __nccwpck_require__(8816).Request + module.exports.FormData = __nccwpck_require__(8303).FormData + module.exports.File = __nccwpck_require__(7446).File + module.exports.FileReader = __nccwpck_require__(3558).FileReader - options = parseOptions(options) + const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1272) - // Special cases where nothing can possibly be lower - if (options.includePrerelease && - (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { - return false - } - if (!options.includePrerelease && - (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { - return false - } + module.exports.setGlobalOrigin = setGlobalOrigin + module.exports.getGlobalOrigin = getGlobalOrigin - // Same direction increasing (> or >=) - if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { - return true - } - // Same direction decreasing (< or <=) - if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { - return true - } - // same SemVer and both sides are inclusive (<= or >=) - if ( - (this.semver.version === comp.semver.version) && - this.operator.includes('=') && comp.operator.includes('=')) { - return true - } - // opposite directions less than - if (cmp(this.semver, '<', comp.semver, options) && - this.operator.startsWith('>') && comp.operator.startsWith('<')) { - return true - } - // opposite directions greater than - if (cmp(this.semver, '>', comp.semver, options) && - this.operator.startsWith('<') && comp.operator.startsWith('>')) { - return true - } - return false - } + const { CacheStorage } = __nccwpck_require__(1616) + const { kConstruct } = __nccwpck_require__(3157) + + // Cache & CacheStorage are tightly coupled with fetch. Even if it may run + // in an older version of Node, it doesn't have any use without fetch. 
+ module.exports.caches = new CacheStorage(kConstruct) } -module.exports = Comparator +if (util.nodeMajor >= 16) { + const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(3827) + + module.exports.deleteCookie = deleteCookie + module.exports.getCookies = getCookies + module.exports.getSetCookies = getSetCookies + module.exports.setCookie = setCookie + + const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(7253) -const parseOptions = __nccwpck_require__(785) -const { safeRe: re, t } = __nccwpck_require__(9523) -const cmp = __nccwpck_require__(5098) -const debug = __nccwpck_require__(427) -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) + module.exports.parseMIMEType = parseMIMEType + module.exports.serializeAMimeType = serializeAMimeType +} + +if (util.nodeMajor >= 18 && hasCrypto) { + const { WebSocket } = __nccwpck_require__(2599) + + module.exports.WebSocket = WebSocket +} + +module.exports.request = makeDispatcher(api.request) +module.exports.stream = makeDispatcher(api.stream) +module.exports.pipeline = makeDispatcher(api.pipeline) +module.exports.connect = makeDispatcher(api.connect) +module.exports.upgrade = makeDispatcher(api.upgrade) + +module.exports.MockClient = MockClient +module.exports.MockPool = MockPool +module.exports.MockAgent = MockAgent +module.exports.mockErrors = mockErrors /***/ }), -/***/ 9828: +/***/ 916: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// hoisted class for cyclic dependency -class Range { - constructor (range, options) { - options = parseOptions(options) - if (range instanceof Range) { - if ( - range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease - ) { - return range - } else { - return new Range(range.raw, options) - } - } - if (range instanceof Comparator) { - // just put it in the set and return - this.raw = range.value - this.set = [[range]] - this.format() - return this - } +const { InvalidArgumentError } = __nccwpck_require__(5465) +const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(9271) +const DispatcherBase = __nccwpck_require__(4384) +const Pool = __nccwpck_require__(408) +const Client = __nccwpck_require__(2802) +const util = __nccwpck_require__(825) +const createRedirectInterceptor = __nccwpck_require__(7357) +const { WeakRef, FinalizationRegistry } = __nccwpck_require__(1341)() - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kMaxRedirections = Symbol('maxRedirections') +const kOnDrain = Symbol('onDrain') +const kFactory = Symbol('factory') +const kFinalizer = Symbol('finalizer') +const kOptions = Symbol('options') - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. - this.raw = range - .trim() - .split(/\s+/) - .join(' ') +function defaultFactory (origin, opts) { + return opts && opts.connections === 1 + ? 
new Client(origin, opts) + : new Pool(origin, opts) +} - // First, split on || - this.set = this.raw - .split('||') - // map the range to a 2d array of comparators - .map(r => this.parseRange(r.trim())) - // throw out any comparator lists that are empty - // this generally means that it was not a valid range, which is allowed - // in loose mode, but will still throw if the WHOLE range is invalid. - .filter(c => c.length) +class Agent extends DispatcherBase { + constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { + super() - if (!this.set.length) { - throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') } - // if we have any that are not the null set, throw out null sets. - if (this.set.length > 1) { - // keep the first one, in case they're all null sets - const first = this.set[0] - this.set = this.set.filter(c => !isNullSet(c[0])) - if (this.set.length === 0) { - this.set = [first] - } else if (this.set.length > 1) { - // if we have any that are *, then the range is just * - for (const c of this.set) { - if (c.length === 1 && isAny(c[0])) { - this.set = [c] - break - } - } - } + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') } - this.format() - } - - format () { - this.range = this.set - .map((comps) => comps.join(' ').trim()) - .join('||') - .trim() - return this.range - } - - toString () { - return this.range - } + if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } - parseRange (range) { - // memoize range parsing for performance. - // this is a very hot path, and fully deterministic. - const memoOpts = - (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | - (this.options.loose && FLAG_LOOSE) - const memoKey = memoOpts + ':' + range - const cached = cache.get(memoKey) - if (cached) { - return cached + if (connect && typeof connect !== 'function') { + connect = { ...connect } } - const loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) - debug('hyphen replace', range) + this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) + ? options.interceptors.Agent + : [createRedirectInterceptor({ maxRedirections })] - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range) + this[kOptions] = { ...util.deepClone(options), connect } + this[kOptions].interceptors = options.interceptors + ? 
{ ...options.interceptors } + : undefined + this[kMaxRedirections] = maxRedirections + this[kFactory] = factory + this[kClients] = new Map() + this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { + const ref = this[kClients].get(key) + if (ref !== undefined && ref.deref() === undefined) { + this[kClients].delete(key) + } + }) - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[t.TILDETRIM], tildeTrimReplace) - debug('tilde trim', range) + const agent = this - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[t.CARETTRIM], caretTrimReplace) - debug('caret trim', range) + this[kOnDrain] = (origin, targets) => { + agent.emit('drain', origin, [agent, ...targets]) + } - // At this point, the range is completely trimmed and - // ready to be split into comparators. + this[kOnConnect] = (origin, targets) => { + agent.emit('connect', origin, [agent, ...targets]) + } - let rangeList = range - .split(' ') - .map(comp => parseComparator(comp, this.options)) - .join(' ') - .split(/\s+/) - // >=0.0.0 is equivalent to * - .map(comp => replaceGTE0(comp, this.options)) + this[kOnDisconnect] = (origin, targets, err) => { + agent.emit('disconnect', origin, [agent, ...targets], err) + } - if (loose) { - // in loose mode, throw out any that are not valid comparators - rangeList = rangeList.filter(comp => { - debug('loose invalid filter', comp, this.options) - return !!comp.match(re[t.COMPARATORLOOSE]) - }) + this[kOnConnectionError] = (origin, targets, err) => { + agent.emit('connectionError', origin, [agent, ...targets], err) } - debug('range list', rangeList) + } - // if any comparators are the null set, then replace with JUST null set - // if more than one comparator, remove any * comparators - // also, don't include the same comparator more than once - const rangeMap = new Map() - const comparators = rangeList.map(comp => new Comparator(comp, this.options)) - for (const comp of comparators) { - if (isNullSet(comp)) { - return [comp] + get [kRunning] () { + let ret = 0 + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore next: gc is undeterministic */ + if (client) { + ret += client[kRunning] } - rangeMap.set(comp.value, comp) } - if (rangeMap.size > 1 && rangeMap.has('')) { - rangeMap.delete('') + return ret + } + + [kDispatch] (opts, handler) { + let key + if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { + key = String(opts.origin) + } else { + throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') + } + + const ref = this[kClients].get(key) + + let dispatcher = ref ? 
ref.deref() : null + if (!dispatcher) { + dispatcher = this[kFactory](opts.origin, this[kOptions]) + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].set(key, new WeakRef(dispatcher)) + this[kFinalizer].register(dispatcher, key) } - const result = [...rangeMap.values()] - cache.set(memoKey, result) - return result + return dispatcher.dispatch(opts, handler) } - intersects (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') + async [kClose] () { + const closePromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + closePromises.push(client.close()) + } } - return this.set.some((thisComparators) => { - return ( - isSatisfiable(thisComparators, options) && - range.set.some((rangeComparators) => { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every((thisComparator) => { - return rangeComparators.every((rangeComparator) => { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) + await Promise.all(closePromises) } - // if ANY of the sets match ALL of its comparators, then pass - test (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false + async [kDestroy] (err) { + const destroyPromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + destroyPromises.push(client.destroy(err)) } } - for (let i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false + await Promise.all(destroyPromises) } } -module.exports = Range +module.exports = Agent -const LRU = __nccwpck_require__(7129) -const cache = new LRU({ max: 1000 }) -const parseOptions = __nccwpck_require__(785) -const Comparator = __nccwpck_require__(1532) -const debug = __nccwpck_require__(427) -const SemVer = __nccwpck_require__(8088) -const { - safeRe: re, - t, - comparatorTrimReplace, - tildeTrimReplace, - caretTrimReplace, -} = __nccwpck_require__(9523) -const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = __nccwpck_require__(2293) +/***/ }), -const isNullSet = c => c.value === '<0.0.0-0' -const isAny = c => c.value === '' +/***/ 2411: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// take a set of comparators and determine whether there -// exists a version which can satisfy it -const isSatisfiable = (comparators, options) => { - let result = true - const remainingComparators = comparators.slice() - let testComparator = remainingComparators.pop() +const { addAbortListener } = __nccwpck_require__(825) +const { RequestAbortedError } = __nccwpck_require__(5465) - while (result && remainingComparators.length) { - result = remainingComparators.every((otherComparator) => { - return testComparator.intersects(otherComparator, options) - }) +const kListener = Symbol('kListener') +const kSignal = Symbol('kSignal') - testComparator = remainingComparators.pop() +function abort (self) { + if (self.abort) { + self.abort() + } else { + self.onError(new RequestAbortedError()) } - - return result } -// comprised of xranges, tildes, stars, and gtlt's at this point. 
-// already replaced the hyphen ranges -// turn into a set of JUST comparators. -const parseComparator = (comp, options) => { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} +function addSignal (self, signal) { + self[kSignal] = null + self[kListener] = null -const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + if (!signal) { + return + } -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 -// ~0.0.1 --> >=0.0.1 <0.1.0-0 -const replaceTildes = (comp, options) => { - return comp - .trim() - .split(/\s+/) - .map((c) => replaceTilde(c, options)) - .join(' ') + if (signal.aborted) { + abort(self) + return + } + + self[kSignal] = signal + self[kListener] = () => { + abort(self) + } + + addAbortListener(self[kSignal], self[kListener]) } -const replaceTilde = (comp, options) => { - const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] - return comp.replace(r, (_, M, m, p, pr) => { - debug('tilde', comp, _, M, m, p, pr) - let ret +function removeSignal (self) { + if (!self[kSignal]) { + return + } - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0 <${+M + 1}.0.0-0` - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0-0 - ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` - } else if (pr) { - debug('replaceTilde pr', pr) - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` - } else { - // ~1.2.3 == >=1.2.3 <1.3.0-0 - ret = `>=${M}.${m}.${p - } <${M}.${+m + 1}.0-0` - } + if ('removeEventListener' in self[kSignal]) { + self[kSignal].removeEventListener('abort', self[kListener]) + } else { + self[kSignal].removeListener('abort', self[kListener]) + } - debug('tilde return', ret) - return ret - }) + self[kSignal] = null + self[kListener] = null } -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 -// ^1.2.3 --> >=1.2.3 <2.0.0-0 -// ^1.2.0 --> >=1.2.0 <2.0.0-0 -// ^0.0.1 --> >=0.0.1 <0.0.2-0 -// ^0.1.0 --> >=0.1.0 <0.2.0-0 -const replaceCarets = (comp, options) => { - return comp - .trim() - .split(/\s+/) - .map((c) => replaceCaret(c, options)) - .join(' ') +module.exports = { + addSignal, + removeSignal } -const replaceCaret = (comp, options) => { - debug('caret', comp, options) - const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] - const z = options.includePrerelease ? 
'-0' : '' - return comp.replace(r, (_, M, m, p, pr) => { - debug('caret', comp, _, M, m, p, pr) - let ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` - } else if (isX(p)) { - if (M === '0') { - ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` - } else { - ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${m}.${+p + 1}-0` - } else { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` - } - } else { - ret = `>=${M}.${m}.${p}-${pr - } <${+M + 1}.0.0-0` - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p - }${z} <${M}.${m}.${+p + 1}-0` - } else { - ret = `>=${M}.${m}.${p - }${z} <${M}.${+m + 1}.0-0` - } - } else { - ret = `>=${M}.${m}.${p - } <${+M + 1}.0.0-0` - } - } - debug('caret return', ret) - return ret - }) -} +/***/ }), -const replaceXRanges = (comp, options) => { - debug('replaceXRanges', comp, options) - return comp - .split(/\s+/) - .map((c) => replaceXRange(c, options)) - .join(' ') -} +/***/ 2828: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const replaceXRange = (comp, options) => { - comp = comp.trim() - const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] - return comp.replace(r, (ret, gtlt, M, m, p, pr) => { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - const xM = isX(M) - const xm = xM || isX(m) - const xp = xm || isX(p) - const anyX = xp - if (gtlt === '=' && anyX) { - gtlt = '' - } - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? '-0' : '' +const { AsyncResource } = __nccwpck_require__(852) +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(5465) +const util = __nccwpck_require__(825) +const { addSignal, removeSignal } = __nccwpck_require__(2411) - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 +class ConnectHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. 
- gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } - if (gtlt === '<') { - pr = '-0' - } + const { signal, opaque, responseHeaders } = opts - ret = `${gtlt + M}.${m}.${p}${pr}` - } else if (xm) { - ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` - } else if (xp) { - ret = `>=${M}.${m}.0${pr - } <${M}.${+m + 1}.0-0` + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } - debug('xRange return', ret) + super('UNDICI_CONNECT') - return ret - }) -} + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.callback = callback + this.abort = null -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -const replaceStars = (comp, options) => { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp - .trim() - .replace(re[t.STAR], '') -} + addSignal(this, signal) + } -const replaceGTE0 = (comp, options) => { - debug('replaceGTE0', comp, options) - return comp - .trim() - .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') -} + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 -const hyphenReplace = incPr => ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) => { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = `>=${fM}.0.0${incPr ? '-0' : ''}` - } else if (isX(fp)) { - from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` - } else if (fpr) { - from = `>=${from}` - } else { - from = `>=${from}${incPr ? '-0' : ''}` + this.abort = abort + this.context = context } - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = `<${+tM + 1}.0.0-0` - } else if (isX(tp)) { - to = `<${tM}.${+tm + 1}.0-0` - } else if (tpr) { - to = `<=${tM}.${tm}.${tp}-${tpr}` - } else if (incPr) { - to = `<${tM}.${tm}.${+tp + 1}-0` - } else { - to = `<=${to}` + onHeaders () { + throw new SocketError('bad connect', null) } - return `${from} ${to}`.trim() -} + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this -const testSet = (set, version, options) => { - for (let i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false + removeSignal(this) + + this.callback = null + + let headers = rawHeaders + // Indicates is an HTTP2Session + if (headers != null) { + headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) } + + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + socket, + opaque, + context + }) } - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. 
- for (let i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === Comparator.ANY) { - continue - } + onError (err) { + const { callback, opaque } = this - if (set[i].semver.prerelease.length > 0) { - const allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } + } +} - // Version has a -pre, but it's not one of the ones we like. - return false +function connect (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + connect.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) } - return true + try { + const connectHandler = new ConnectHandler(opts, callback) + this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } } +module.exports = connect + /***/ }), -/***/ 8088: +/***/ 1620: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const debug = __nccwpck_require__(427) -const { MAX_LENGTH, MAX_SAFE_INTEGER } = __nccwpck_require__(2293) -const { safeRe: re, t } = __nccwpck_require__(9523) - -const parseOptions = __nccwpck_require__(785) -const { compareIdentifiers } = __nccwpck_require__(2463) -class SemVer { - constructor (version, options) { - options = parseOptions(options) - - if (version instanceof SemVer) { - if (version.loose === !!options.loose && - version.includePrerelease === !!options.includePrerelease) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) - } - if (version.length > MAX_LENGTH) { - throw new TypeError( - `version is longer than ${MAX_LENGTH} characters` - ) - } - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - // this isn't actually relevant for versions, but keep it so that we - // don't run into trouble passing this.options around. - this.includePrerelease = !!options.includePrerelease +const { + Readable, + Duplex, + PassThrough +} = __nccwpck_require__(2781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(5465) +const util = __nccwpck_require__(825) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(2411) +const assert = __nccwpck_require__(9491) - const m = version.trim().match(options.loose ? 
re[t.LOOSE] : re[t.FULL]) +const kResume = Symbol('resume') - if (!m) { - throw new TypeError(`Invalid Version: ${version}`) - } +class PipelineRequest extends Readable { + constructor () { + super({ autoDestroy: true }) - this.raw = version + this[kResume] = null + } - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] + _read () { + const { [kResume]: resume } = this - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') + if (resume) { + this[kResume] = null + resume() } + } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } + _destroy (err, callback) { + this._read() - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } + callback(err) + } +} - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map((id) => { - if (/^[0-9]+$/.test(id)) { - const num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } +class PipelineResponse extends Readable { + constructor (resume) { + super({ autoDestroy: true }) + this[kResume] = resume + } - this.build = m[5] ? m[5].split('.') : [] - this.format() + _read () { + this[kResume]() } - format () { - this.version = `${this.major}.${this.minor}.${this.patch}` - if (this.prerelease.length) { - this.version += `-${this.prerelease.join('.')}` + _destroy (err, callback) { + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() } - return this.version - } - toString () { - return this.version + callback(err) } +} - compare (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - if (typeof other === 'string' && other === this.version) { - return 0 - } - other = new SemVer(other, this.options) +class PipelineHandler extends AsyncResource { + constructor (opts, handler) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - if (other.version === this.version) { - return 0 + if (typeof handler !== 'function') { + throw new InvalidArgumentError('invalid handler') } - return this.compareMain(other) || this.comparePre(other) - } + const { signal, method, opaque, onInfo, responseHeaders } = opts - compareMain (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } - return ( - compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) - ) - } - - comparePre (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') } - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') } - let i = 0 - do { - const a = this.prerelease[i] - const b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && 
b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) - } + super('UNDICI_PIPELINE') - compareBuild (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.handler = handler + this.abort = null + this.context = null + this.onInfo = onInfo || null - let i = 0 - do { - const a = this.build[i] - const b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) - } + this.req = new PipelineRequest().on('error', util.nop) - // preminor will bump the version up to the next minor release, and immediately - // down to pre-release. premajor and prepatch work the same way. - inc (release, identifier, identifierBase) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier, identifierBase) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier, identifierBase) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier, identifierBase) - this.inc('pre', identifier, identifierBase) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier, identifierBase) - } - this.inc('pre', identifier, identifierBase) - break + this.ret = new Duplex({ + readableObjectMode: opts.objectMode, + autoDestroy: true, + read: () => { + const { body } = this - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if ( - this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0 - ) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ + if (body && body.resume) { + body.resume() } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. - case 'pre': { - const base = Number(identifierBase) ? 
1 : 0 + }, + write: (chunk, encoding, callback) => { + const { req } = this - if (!identifier && identifierBase === false) { - throw new Error('invalid increment argument: identifier is empty') + if (req.push(chunk, encoding) || req._readableState.destroyed) { + callback() + } else { + req[kResume] = callback } + }, + destroy: (err, callback) => { + const { body, req, res, ret, abort } = this - if (this.prerelease.length === 0) { - this.prerelease = [base] - } else { - let i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - if (identifier === this.prerelease.join('.') && identifierBase === false) { - throw new Error('invalid increment argument: identifier already exists') - } - this.prerelease.push(base) - } + if (!err && !ret._readableState.endEmitted) { + err = new RequestAbortedError() } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - let prerelease = [identifier, base] - if (identifierBase === false) { - prerelease = [identifier] - } - if (compareIdentifiers(this.prerelease[0], identifier) === 0) { - if (isNaN(this.prerelease[1])) { - this.prerelease = prerelease - } - } else { - this.prerelease = prerelease - } + + if (abort && err) { + abort() } - break + + util.destroy(body, err) + util.destroy(req, err) + util.destroy(res, err) + + removeSignal(this) + + callback(err) } - default: - throw new Error(`invalid increment argument: ${release}`) - } - this.raw = this.format() - if (this.build.length) { - this.raw += `+${this.build.join('.')}` - } - return this + }).on('prefinish', () => { + const { req } = this + + // Node < 15 does not call _final in same tick. + req.push(null) + }) + + this.res = null + + addSignal(this, signal) } -} -module.exports = SemVer + onConnect (abort, context) { + const { ret, res } = this + assert(!res, 'pipeline cannot be retried') -/***/ }), + if (ret.destroyed) { + throw new RequestAbortedError() + } -/***/ 8848: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this.abort = abort + this.context = context + } -const parse = __nccwpck_require__(5925) -const clean = (version, options) => { - const s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} -module.exports = clean + onHeaders (statusCode, rawHeaders, resume) { + const { opaque, handler, context } = this + if (statusCode < 200) { + if (this.onInfo) { + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.onInfo({ statusCode, headers }) + } + return + } -/***/ }), + this.res = new PipelineResponse(resume) -/***/ 5098: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let body + try { + this.handler = null + const headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + body = this.runInAsyncScope(handler, null, { + statusCode, + headers, + opaque, + body: this.res, + context + }) + } catch (err) { + this.res.on('error', util.nop) + throw err + } -const eq = __nccwpck_require__(1898) -const neq = __nccwpck_require__(6017) -const gt = __nccwpck_require__(4123) -const gte = __nccwpck_require__(5522) -const lt = __nccwpck_require__(194) -const lte = __nccwpck_require__(7520) + if (!body || typeof body.on !== 'function') { + throw new InvalidReturnValueError('expected Readable') + } -const cmp = (a, op, b, loose) => { - switch (op) { - case '===': - if (typeof a === 'object') { - a = a.version - } - if (typeof b === 'object') { - b = b.version - } - return a === b + body + .on('data', (chunk) => { + const { ret, body } = this - case '!==': - if (typeof a === 'object') { - a = a.version - } - if (typeof b === 'object') { - b = b.version - } - return a !== b + if (!ret.push(chunk) && body.pause) { + body.pause() + } + }) + .on('error', (err) => { + const { ret } = this - case '': - case '=': - case '==': - return eq(a, b, loose) + util.destroy(ret, err) + }) + .on('end', () => { + const { ret } = this - case '!=': - return neq(a, b, loose) + ret.push(null) + }) + .on('close', () => { + const { ret } = this - case '>': - return gt(a, b, loose) + if (!ret._readableState.ended) { + util.destroy(ret, new RequestAbortedError()) + } + }) - case '>=': - return gte(a, b, loose) + this.body = body + } - case '<': - return lt(a, b, loose) + onData (chunk) { + const { res } = this + return res.push(chunk) + } - case '<=': - return lte(a, b, loose) + onComplete (trailers) { + const { res } = this + res.push(null) + } - default: - throw new TypeError(`Invalid operator: ${op}`) + onError (err) { + const { ret } = this + this.handler = null + util.destroy(ret, err) } } -module.exports = cmp + +function pipeline (opts, handler) { + try { + const pipelineHandler = new PipelineHandler(opts, handler) + this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) + return pipelineHandler.ret + } catch (err) { + return new PassThrough().destroy(err) + } +} + +module.exports = pipeline /***/ }), -/***/ 3466: +/***/ 6074: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const SemVer = __nccwpck_require__(8088) -const parse = __nccwpck_require__(5925) -const { safeRe: re, t } = __nccwpck_require__(9523) -const coerce = (version, options) => { - if (version instanceof SemVer) { - return version - } - if (typeof version === 'number') { - version = String(version) - } +const Readable = __nccwpck_require__(7698) +const { + InvalidArgumentError, + RequestAbortedError +} = __nccwpck_require__(5465) +const util = __nccwpck_require__(825) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7282) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(2411) - if (typeof version !== 'string') { - return null - } +class RequestHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } - options = options || {} + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts - let match = null - if (!options.rtl) { - match = version.match(re[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. 
- // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - let next - while ((next = re[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') } - re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length - } - // leave it in a clean state - re[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) -} -module.exports = coerce + if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { + throw new InvalidArgumentError('invalid highWaterMark') + } + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } -/***/ }), + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } -/***/ 2156: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } -const SemVer = __nccwpck_require__(8088) -const compareBuild = (a, b, loose) => { - const versionA = new SemVer(a, loose) - const versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} -module.exports = compareBuild + super('UNDICI_REQUEST') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.res = null + this.abort = null + this.body = body + this.trailers = {} + this.context = null + this.onInfo = onInfo || null + this.throwOnError = throwOnError + this.highWaterMark = highWaterMark -/***/ }), + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } -/***/ 2804: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + addSignal(this, signal) + } -const compare = __nccwpck_require__(4309) -const compareLoose = (a, b) => compare(a, b, true) -module.exports = compareLoose + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + this.abort = abort + this.context = context + } -/***/ }), + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this -/***/ 4309: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) -const SemVer = __nccwpck_require__(8088) -const compare = (a, b, loose) => - new SemVer(a, loose).compare(new SemVer(b, loose)) + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } -module.exports = compare + const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + const body = new Readable({ resume, abort, contentType, highWaterMark }) + this.callback = null + this.res = body + if (callback !== null) { + if (this.throwOnError && statusCode >= 400) { + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body, contentType, statusCode, statusMessage, headers } + ) + } else { + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + trailers: this.trailers, + opaque, + body, + context + }) + } + } + } -/***/ }), + onData (chunk) { + const { res } = this + return res.push(chunk) + } -/***/ 4297: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + onComplete (trailers) { + const { res } = this -const parse = __nccwpck_require__(5925) + removeSignal(this) -const diff = (version1, version2) => { - const v1 = parse(version1, null, true) - const v2 = parse(version2, null, true) - const comparison = v1.compare(v2) + util.parseHeaders(trailers, this.trailers) - if (comparison === 0) { - return null + res.push(null) } - const v1Higher = comparison > 0 - const highVersion = v1Higher ? v1 : v2 - const lowVersion = v1Higher ? v2 : v1 - const highHasPre = !!highVersion.prerelease.length - const lowHasPre = !!lowVersion.prerelease.length + onError (err) { + const { res, callback, body, opaque } = this - if (lowHasPre && !highHasPre) { - // Going from prerelease -> no prerelease requires some special casing + removeSignal(this) - // If the low version has only a major, then it will always be a major - // Some examples: - // 1.0.0-1 -> 1.0.0 - // 1.0.0-1 -> 1.1.1 - // 1.0.0-1 -> 2.0.0 - if (!lowVersion.patch && !lowVersion.minor) { - return 'major' + if (callback) { + // TODO: Does this need queueMicrotask? + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } - // Otherwise it can be determined by checking the high version - - if (highVersion.patch) { - // anything higher than a patch bump would result in the wrong version - return 'patch' + if (res) { + this.res = null + // Ensure all queued handlers are invoked before destroying res. + queueMicrotask(() => { + util.destroy(res, err) + }) } - if (highVersion.minor) { - // anything higher than a minor bump would result in the wrong version - return 'minor' + if (body) { + this.body = null + util.destroy(body, err) } - - // bumping major/minor/patch all have same result - return 'major' - } - - // add the `pre` prefix if we are going to a prerelease version - const prefix = highHasPre ? 'pre' : '' - - if (v1.major !== v2.major) { - return prefix + 'major' } +} - if (v1.minor !== v2.minor) { - return prefix + 'minor' +function request (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + request.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) } - if (v1.patch !== v2.patch) { - return prefix + 'patch' + try { + this.dispatch(opts, new RequestHandler(opts, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } - - // high and low are preleases - return 'prerelease' } -module.exports = diff +module.exports = request +module.exports.RequestHandler = RequestHandler /***/ }), -/***/ 1898: +/***/ 6056: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const compare = __nccwpck_require__(4309) -const eq = (a, b, loose) => compare(a, b, loose) === 0 -module.exports = eq - -/***/ }), -/***/ 4123: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const { finished, PassThrough } = __nccwpck_require__(2781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(5465) +const util = __nccwpck_require__(825) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7282) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(2411) -const compare = __nccwpck_require__(4309) -const gt = (a, b, loose) => compare(a, b, loose) > 0 -module.exports = gt +class StreamHandler extends AsyncResource { + constructor (opts, factory, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts -/***/ }), + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } -/***/ 5522: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof factory !== 'function') { + throw new InvalidArgumentError('invalid factory') + } -const compare = __nccwpck_require__(4309) -const gte = (a, b, loose) => compare(a, b, loose) >= 0 -module.exports = gte + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } -/***/ }), + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } -/***/ 900: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + super('UNDICI_STREAM') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } -const SemVer = __nccwpck_require__(8088) + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.factory = factory + this.callback = callback + this.res = null + this.abort = null + this.context = null + this.trailers = null + this.body = body + this.onInfo = onInfo || null + this.throwOnError = throwOnError || false -const inc = (version, release, options, identifier, identifierBase) => { - if (typeof (options) === 'string') { - identifierBase = identifier - identifier = options - options = undefined - } + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } - try { - return new SemVer( - version instanceof SemVer ? 
version.version : version, - options - ).inc(release, identifier, identifierBase).version - } catch (er) { - return null + addSignal(this, signal) } -} -module.exports = inc + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } -/***/ }), + this.abort = abort + this.context = context + } -/***/ 194: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { factory, opaque, context, callback, responseHeaders } = this -const compare = __nccwpck_require__(4309) -const lt = (a, b, loose) => compare(a, b, loose) < 0 -module.exports = lt + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } -/***/ }), + this.factory = null -/***/ 7520: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let res -const compare = __nccwpck_require__(4309) -const lte = (a, b, loose) => compare(a, b, loose) <= 0 -module.exports = lte + if (this.throwOnError && statusCode >= 400) { + const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + res = new PassThrough() + this.callback = null + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body: res, contentType, statusCode, statusMessage, headers } + ) + } else { + if (factory === null) { + return + } -/***/ }), + res = this.runInAsyncScope(factory, null, { + statusCode, + headers, + opaque, + context + }) -/***/ 6688: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if ( + !res || + typeof res.write !== 'function' || + typeof res.end !== 'function' || + typeof res.on !== 'function' + ) { + throw new InvalidReturnValueError('expected Writable') + } -const SemVer = __nccwpck_require__(8088) -const major = (a, loose) => new SemVer(a, loose).major -module.exports = major + // TODO: Avoid finished. It registers an unnecessary amount of listeners. + finished(res, { readable: false }, (err) => { + const { callback, res, opaque, trailers, abort } = this + this.res = null + if (err || !res.readable) { + util.destroy(res, err) + } -/***/ }), + this.callback = null + this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) -/***/ 8447: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (err) { + abort() + } + }) + } -const SemVer = __nccwpck_require__(8088) -const minor = (a, loose) => new SemVer(a, loose).minor -module.exports = minor + res.on('drain', resume) + this.res = res -/***/ }), + const needDrain = res.writableNeedDrain !== undefined + ? res.writableNeedDrain + : res._writableState && res._writableState.needDrain -/***/ 6017: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return needDrain !== true + } -const compare = __nccwpck_require__(4309) -const neq = (a, b, loose) => compare(a, b, loose) !== 0 -module.exports = neq + onData (chunk) { + const { res } = this + return res ? 
res.write(chunk) : true + } -/***/ }), + onComplete (trailers) { + const { res } = this -/***/ 5925: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + removeSignal(this) -const SemVer = __nccwpck_require__(8088) -const parse = (version, options, throwErrors = false) => { - if (version instanceof SemVer) { - return version - } - try { - return new SemVer(version, options) - } catch (er) { - if (!throwErrors) { - return null + if (!res) { + return } - throw er - } -} - -module.exports = parse - -/***/ }), + this.trailers = util.parseHeaders(trailers) -/***/ 2866: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + res.end() + } -const SemVer = __nccwpck_require__(8088) -const patch = (a, loose) => new SemVer(a, loose).patch -module.exports = patch + onError (err) { + const { res, callback, opaque, body } = this + removeSignal(this) -/***/ }), + this.factory = null -/***/ 6014: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (res) { + this.res = null + util.destroy(res, err) + } else if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } -const parse = __nccwpck_require__(5925) -const prerelease = (version, options) => { - const parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null + if (body) { + this.body = null + util.destroy(body, err) + } + } } -module.exports = prerelease - - -/***/ }), - -/***/ 6417: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compare = __nccwpck_require__(4309) -const rcompare = (a, b, loose) => compare(b, a, loose) -module.exports = rcompare - -/***/ }), - -/***/ 8701: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const compareBuild = __nccwpck_require__(2156) -const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) -module.exports = rsort - - -/***/ }), - -/***/ 6055: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function stream (opts, factory, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } -const Range = __nccwpck_require__(9828) -const satisfies = (version, range, options) => { try { - range = new Range(range, options) - } catch (er) { - return false + this.dispatch(opts, new StreamHandler(opts, factory, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } - return range.test(version) } -module.exports = satisfies + +module.exports = stream /***/ }), -/***/ 1426: +/***/ 3193: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const compareBuild = __nccwpck_require__(2156) -const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) -module.exports = sort -/***/ }), +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(5465) +const { AsyncResource } = __nccwpck_require__(852) +const util = __nccwpck_require__(825) +const { addSignal, removeSignal } = __nccwpck_require__(2411) +const assert = __nccwpck_require__(9491) -/***/ 9601: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +class UpgradeHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -const parse = __nccwpck_require__(5925) -const valid = (version, options) => { - const v = parse(version, options) - return v ? v.version : null -} -module.exports = valid + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + const { signal, opaque, responseHeaders } = opts -/***/ }), + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } -/***/ 1383: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + super('UNDICI_UPGRADE') -// just pre-load all the stuff that index.js lazily exports -const internalRe = __nccwpck_require__(9523) -const constants = __nccwpck_require__(2293) -const SemVer = __nccwpck_require__(8088) -const identifiers = __nccwpck_require__(2463) -const parse = __nccwpck_require__(5925) -const valid = __nccwpck_require__(9601) -const clean = __nccwpck_require__(8848) -const inc = __nccwpck_require__(900) -const diff = __nccwpck_require__(4297) -const major = __nccwpck_require__(6688) -const minor = __nccwpck_require__(8447) -const patch = __nccwpck_require__(2866) -const prerelease = __nccwpck_require__(6014) -const compare = __nccwpck_require__(4309) -const rcompare = __nccwpck_require__(6417) -const compareLoose = __nccwpck_require__(2804) -const compareBuild = __nccwpck_require__(2156) -const sort = __nccwpck_require__(1426) -const rsort = __nccwpck_require__(8701) -const gt = __nccwpck_require__(4123) -const lt = __nccwpck_require__(194) -const eq = __nccwpck_require__(1898) -const neq = __nccwpck_require__(6017) -const gte = __nccwpck_require__(5522) -const lte = __nccwpck_require__(7520) -const cmp = __nccwpck_require__(5098) -const coerce = __nccwpck_require__(3466) -const Comparator = __nccwpck_require__(1532) -const Range = __nccwpck_require__(9828) -const satisfies = __nccwpck_require__(6055) -const toComparators = __nccwpck_require__(2706) -const maxSatisfying = __nccwpck_require__(579) -const minSatisfying = __nccwpck_require__(832) -const minVersion = __nccwpck_require__(4179) -const validRange = __nccwpck_require__(2098) -const outside = 
__nccwpck_require__(420) -const gtr = __nccwpck_require__(9380) -const ltr = __nccwpck_require__(3323) -const intersects = __nccwpck_require__(7008) -const simplifyRange = __nccwpck_require__(5297) -const subset = __nccwpck_require__(7863) -module.exports = { - parse, - valid, - clean, - inc, - diff, - major, - minor, - patch, - prerelease, - compare, - rcompare, - compareLoose, - compareBuild, - sort, - rsort, - gt, - lt, - eq, - neq, - gte, - lte, - cmp, - coerce, - Comparator, - Range, - satisfies, - toComparators, - maxSatisfying, - minSatisfying, - minVersion, - validRange, - outside, - gtr, - ltr, - intersects, - simplifyRange, - subset, - SemVer, - re: internalRe.re, - src: internalRe.src, - tokens: internalRe.t, - SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, - RELEASE_TYPES: constants.RELEASE_TYPES, - compareIdentifiers: identifiers.compareIdentifiers, - rcompareIdentifiers: identifiers.rcompareIdentifiers, -} + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.abort = null + this.context = null + addSignal(this, signal) + } -/***/ }), + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = null + } + + onHeaders () { + throw new SocketError('bad upgrade', null) + } -/***/ 2293: -/***/ ((module) => { + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -const SEMVER_SPEC_VERSION = '2.0.0' + assert.strictEqual(statusCode, 101) -const MAX_LENGTH = 256 -const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || -/* istanbul ignore next */ 9007199254740991 + removeSignal(this) -// Max safe segment length for coercion. -const MAX_SAFE_COMPONENT_LENGTH = 16 + this.callback = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.runInAsyncScope(callback, null, null, { + headers, + socket, + opaque, + context + }) + } -// Max safe length for a build identifier. The max length minus 6 characters for -// the shortest version with a build 0.0.0+BUILD. -const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + onError (err) { + const { callback, opaque } = this -const RELEASE_TYPES = [ - 'major', - 'premajor', - 'minor', - 'preminor', - 'patch', - 'prepatch', - 'prerelease', -] + removeSignal(this) -module.exports = { - MAX_LENGTH, - MAX_SAFE_COMPONENT_LENGTH, - MAX_SAFE_BUILD_LENGTH, - MAX_SAFE_INTEGER, - RELEASE_TYPES, - SEMVER_SPEC_VERSION, - FLAG_INCLUDE_PRERELEASE: 0b001, - FLAG_LOOSE: 0b010, + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + } } +function upgrade (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + upgrade.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } -/***/ }), - -/***/ 427: -/***/ ((module) => { - -const debug = ( - typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG) -) ? 
(...args) => console.error('SEMVER', ...args) - : () => {} + try { + const upgradeHandler = new UpgradeHandler(opts, callback) + this.dispatch({ + ...opts, + method: opts.method || 'GET', + upgrade: opts.protocol || 'Websocket' + }, upgradeHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} -module.exports = debug +module.exports = upgrade /***/ }), -/***/ 2463: -/***/ ((module) => { - -const numeric = /^[0-9]+$/ -const compareIdentifiers = (a, b) => { - const anum = numeric.test(a) - const bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } +/***/ 3249: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} -const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) -module.exports = { - compareIdentifiers, - rcompareIdentifiers, -} +module.exports.request = __nccwpck_require__(6074) +module.exports.stream = __nccwpck_require__(6056) +module.exports.pipeline = __nccwpck_require__(1620) +module.exports.upgrade = __nccwpck_require__(3193) +module.exports.connect = __nccwpck_require__(2828) /***/ }), -/***/ 785: -/***/ ((module) => { +/***/ 7698: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// parse out just the options we care about -const looseOption = Object.freeze({ loose: true }) -const emptyOpts = Object.freeze({ }) -const parseOptions = options => { - if (!options) { - return emptyOpts - } +// Ported from https://github.com/nodejs/undici/pull/907 - if (typeof options !== 'object') { - return looseOption - } - return options -} -module.exports = parseOptions +const assert = __nccwpck_require__(9491) +const { Readable } = __nccwpck_require__(2781) +const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(5465) +const util = __nccwpck_require__(825) +const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(825) -/***/ }), +let Blob -/***/ 9523: -/***/ ((module, exports, __nccwpck_require__) => { +const kConsume = Symbol('kConsume') +const kReading = Symbol('kReading') +const kBody = Symbol('kBody') +const kAbort = Symbol('abort') +const kContentType = Symbol('kContentType') -const { - MAX_SAFE_COMPONENT_LENGTH, - MAX_SAFE_BUILD_LENGTH, - MAX_LENGTH, -} = __nccwpck_require__(2293) -const debug = __nccwpck_require__(427) -exports = module.exports = {} +const noop = () => {} -// The actual regexps go on exports.re -const re = exports.re = [] -const safeRe = exports.safeRe = [] -const src = exports.src = [] -const t = exports.t = {} -let R = 0 +module.exports = class BodyReadable extends Readable { + constructor ({ + resume, + abort, + contentType = '', + highWaterMark = 64 * 1024 // Same as nodejs fs streams. + }) { + super({ + autoDestroy: true, + read: resume, + highWaterMark + }) -const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + this._readableState.dataEmitted = false -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. 
-const safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] + this[kAbort] = abort + this[kConsume] = null + this[kBody] = null + this[kContentType] = contentType -const makeSafeRegex = (value) => { - for (const [token, max] of safeRegexReplacements) { - value = value - .split(`${token}*`).join(`${token}{0,${max}}`) - .split(`${token}+`).join(`${token}{1,${max}}`) + // Is stream being consumed through Readable API? + // This is an optimization so that we avoid checking + // for 'data' and 'readable' listeners in the hot path + // inside push(). + this[kReading] = false } - return value -} -const createToken = (name, value, isGlobal) => { - const safe = makeSafeRegex(value) - const index = R++ - debug(name, index, value) - t[name] = index - src[index] = value - re[index] = new RegExp(value, isGlobal ? 'g' : undefined) - safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) -} + destroy (err) { + if (this.destroyed) { + // Node < 16 + return this + } -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. + if (err) { + this[kAbort]() + } -createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') -createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + return super.destroy(err) + } -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. + emit (ev, ...args) { + if (ev === 'data') { + // Node < 16.7 + this._readableState.dataEmitted = true + } else if (ev === 'error') { + // Node < 16 + this._readableState.errorEmitted = true + } + return super.emit(ev, ...args) + } -createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + on (ev, ...args) { + if (ev === 'data' || ev === 'readable') { + this[kReading] = true + } + return super.on(ev, ...args) + } -// ## Main Version -// Three dot-separated numeric identifiers. + addListener (ev, ...args) { + return this.on(ev, ...args) + } -createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})`) + off (ev, ...args) { + const ret = super.off(ev, ...args) + if (ev === 'data' || ev === 'readable') { + this[kReading] = ( + this.listenerCount('data') > 0 || + this.listenerCount('readable') > 0 + ) + } + return ret + } -createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + removeListener (ev, ...args) { + return this.off(ev, ...args) + } -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. + push (chunk) { + if (this[kConsume] && chunk !== null && this.readableLength === 0) { + consumePush(this[kConsume], chunk) + return this[kReading] ? 
super.push(chunk) : true + } + return super.push(chunk) + } -createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] -}|${src[t.NONNUMERICIDENTIFIER]})`) + // https://fetch.spec.whatwg.org/#dom-body-text + async text () { + return consume(this, 'text') + } -createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] -}|${src[t.NONNUMERICIDENTIFIER]})`) + // https://fetch.spec.whatwg.org/#dom-body-json + async json () { + return consume(this, 'json') + } -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. + // https://fetch.spec.whatwg.org/#dom-body-blob + async blob () { + return consume(this, 'blob') + } -createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] -}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + // https://fetch.spec.whatwg.org/#dom-body-arraybuffer + async arrayBuffer () { + return consume(this, 'arrayBuffer') + } -createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] -}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + // https://fetch.spec.whatwg.org/#dom-body-formdata + async formData () { + // TODO: Implement. + throw new NotSupportedError() + } -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. + // https://fetch.spec.whatwg.org/#dom-body-bodyused + get bodyUsed () { + return util.isDisturbed(this) + } -createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + // https://fetch.spec.whatwg.org/#dom-body-body + get body () { + if (!this[kBody]) { + this[kBody] = ReadableStreamFrom(this) + if (this[kConsume]) { + // TODO: Is this the best way to force a lock? + this[kBody].getReader() // Ensure stream is locked. + assert(this[kBody].locked) + } + } + return this[kBody] + } -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. + dump (opts) { + let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 + const signal = opts && opts.signal -createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] -}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + if (signal) { + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') + } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) + } + } -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. + if (this.closed) { + return Promise.resolve(null) + } -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop -createToken('FULLPLAIN', `v?${src[t.MAINVERSION] -}${src[t.PRERELEASE]}?${ - src[t.BUILD]}?`) + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) + } +} -createToken('FULL', `^${src[t.FULLPLAIN]}$`) +// https://streams.spec.whatwg.org/#readablestream-locked +function isLocked (self) { + // Consume is an implicit lock. 
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume] +} -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] -}${src[t.PRERELEASELOOSE]}?${ - src[t.BUILD]}?`) +// https://fetch.spec.whatwg.org/#body-unusable +function isUnusable (self) { + return util.isDisturbed(self) || isLocked(self) +} -createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) +async function consume (stream, type) { + if (isUnusable(stream)) { + throw new TypeError('unusable') + } -createToken('GTLT', '((?:<|>)?=?)') + assert(!stream[kConsume]) -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) -createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + return new Promise((resolve, reject) => { + stream[kConsume] = { + type, + stream, + resolve, + reject, + length: 0, + body: [] + } -createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:${src[t.PRERELEASE]})?${ - src[t.BUILD]}?` + - `)?)?`) + stream + .on('error', function (err) { + consumeFinish(this[kConsume], err) + }) + .on('close', function () { + if (this[kConsume].body !== null) { + consumeFinish(this[kConsume], new RequestAbortedError()) + } + }) -createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:${src[t.PRERELEASELOOSE]})?${ - src[t.BUILD]}?` + - `)?)?`) + process.nextTick(consumeStart, stream[kConsume]) + }) +} -createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) -createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) +function consumeStart (consume) { + if (consume.body === null) { + return + } -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -createToken('COERCE', `${'(^|[^\\d])' + - '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + - `(?:$|[^\\d])`) -createToken('COERCERTL', src[t.COERCE], true) + const { _readableState: state } = consume.stream -// Tilde ranges. -// Meaning is "reasonably at or greater than" -createToken('LONETILDE', '(?:~>?)') + for (const chunk of state.buffer) { + consumePush(consume, chunk) + } -createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) -exports.tildeTrimReplace = '$1~' + if (state.endEmitted) { + consumeEnd(this[kConsume]) + } else { + consume.stream.on('end', function () { + consumeEnd(this[kConsume]) + }) + } -createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) -createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + consume.stream.resume() -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -createToken('LONECARET', '(?:\\^)') + while (consume.stream.read() != null) { + // Loop + } +} -createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) -exports.caretTrimReplace = '$1^' +function consumeEnd (consume) { + const { type, body, resolve, stream, length } = consume -createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) -createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + try { + if (type === 'text') { + resolve(toUSVString(Buffer.concat(body))) + } else if (type === 'json') { + resolve(JSON.parse(Buffer.concat(body))) + } else if (type === 'arrayBuffer') { + const dst = new Uint8Array(length) -// A simple gt/lt/eq thing, or just "" to indicate "any version" -createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) -createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + let pos = 0 + for (const buf of body) { + dst.set(buf, pos) + pos += buf.byteLength + } -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] -}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) -exports.comparatorTrimReplace = '$1$2$3' + resolve(dst.buffer) + } else if (type === 'blob') { + if (!Blob) { + Blob = (__nccwpck_require__(4300).Blob) + } + resolve(new Blob(body, { type: stream[kContentType] })) + } -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAIN]})` + - `\\s*$`) + consumeFinish(consume) + } catch (err) { + stream.destroy(err) + } +} + +function consumePush (consume, chunk) { + consume.length += chunk.length + consume.body.push(chunk) +} + +function consumeFinish (consume, err) { + if (consume.body === null) { + return + } -createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAINLOOSE]})` + - `\\s*$`) + if (err) { + consume.reject(err) + } else { + consume.resolve() + } -// Star ranges basically just allow anything at all. -createToken('STAR', '(<|>)?=?\\s*\\*') -// >=0.0.0 is like a star -createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') -createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') + consume.type = null + consume.stream = null + consume.resolve = null + consume.reject = null + consume.length = 0 + consume.body = null +} /***/ }), -/***/ 9380: +/***/ 7282: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Determine if version is greater than all the versions possible in the range. 
-const outside = __nccwpck_require__(420) -const gtr = (version, range, options) => outside(version, range, '>', options) -module.exports = gtr +const assert = __nccwpck_require__(9491) +const { + ResponseStatusCodeError +} = __nccwpck_require__(5465) +const { toUSVString } = __nccwpck_require__(825) +async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { + assert(body) -/***/ }), + let chunks = [] + let limit = 0 -/***/ 7008: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for await (const chunk of body) { + chunks.push(chunk) + limit += chunk.length + if (limit > 128 * 1024) { + chunks = null + break + } + } -const Range = __nccwpck_require__(9828) -const intersects = (r1, r2, options) => { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2, options) -} -module.exports = intersects + if (statusCode === 204 || !contentType || !chunks) { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) + return + } + try { + if (contentType.startsWith('application/json')) { + const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } -/***/ }), + if (contentType.startsWith('text/')) { + const payload = toUSVString(Buffer.concat(chunks)) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + } catch (err) { + // Process in a fallback if error + } -/***/ 3323: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers)) +} -const outside = __nccwpck_require__(420) -// Determine if version is less than all the versions possible in the range -const ltr = (version, range, options) => outside(version, range, '<', options) -module.exports = ltr +module.exports = { getResolveErrorBodyCallback } /***/ }), -/***/ 579: +/***/ 6399: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) -const maxSatisfying = (versions, range, options) => { - let max = null - let maxSV = null - let rangeObj = null - try { - rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach((v) => { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max -} -module.exports = maxSatisfying +const { + BalancedPoolMissingUpstreamError, + InvalidArgumentError +} = __nccwpck_require__(5465) +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} = __nccwpck_require__(2002) +const Pool = __nccwpck_require__(408) +const { kUrl, kInterceptors } = __nccwpck_require__(9271) +const { parseOrigin } = __nccwpck_require__(825) +const kFactory = Symbol('factory') -/***/ }), +const kOptions = Symbol('options') +const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') +const kCurrentWeight = Symbol('kCurrentWeight') +const kIndex = Symbol('kIndex') +const kWeight = Symbol('kWeight') +const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') +const kErrorPenalty = Symbol('kErrorPenalty') -/***/ 832: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function getGreatestCommonDivisor (a, b) { + if (b === 0) return a + return getGreatestCommonDivisor(b, a % b) +} -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) -const minSatisfying = (versions, range, options) => { - let min = null - let minSV = null - let rangeObj = null - try { - rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach((v) => { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min +function defaultFactory (origin, opts) { + return new Pool(origin, opts) } -module.exports = minSatisfying +class BalancedPool extends PoolBase { + constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { + super() -/***/ }), + this[kOptions] = opts + this[kIndex] = -1 + this[kCurrentWeight] = 0 -/***/ 4179: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 + this[kErrorPenalty] = this[kOptions].errorPenalty || 15 -const SemVer = __nccwpck_require__(8088) -const Range = __nccwpck_require__(9828) -const gt = __nccwpck_require__(4123) + if (!Array.isArray(upstreams)) { + upstreams = [upstreams] + } -const minVersion = (range, loose) => { - range = new Range(range, loose) + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } - let minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } + this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) + ? 
opts.interceptors.BalancedPool + : [] + this[kFactory] = factory - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver + for (const upstream of upstreams) { + this.addUpstream(upstream) + } + this._updateBalancedPoolStats() } - minver = null - for (let i = 0; i < range.set.length; ++i) { - const comparators = range.set[i] + addUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin - let setMin = null - comparators.forEach((comparator) => { - // Clone to avoid manipulating the comparator's semver object. - const compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!setMin || gt(compver, setMin)) { - setMin = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error(`Unexpected operation: ${comparator.operator}`) + if (this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + ))) { + return this + } + const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) + + this[kAddClient](pool) + pool.on('connect', () => { + pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) + }) + + pool.on('connectionError', () => { + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + }) + + pool.on('disconnect', (...args) => { + const err = args[2] + if (err && err.code === 'UND_ERR_SOCKET') { + // decrease the weight of the pool. + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() } }) - if (setMin && (!minver || gt(minver, setMin))) { - minver = setMin - } - } - if (minver && range.test(minver)) { - return minver - } + for (const client of this[kClients]) { + client[kWeight] = this[kMaxWeightPerServer] + } - return null -} -module.exports = minVersion + this._updateBalancedPoolStats() + return this + } -/***/ }), + _updateBalancedPoolStats () { + this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + } -/***/ 420: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + removeUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin -const SemVer = __nccwpck_require__(8088) -const Comparator = __nccwpck_require__(1532) -const { ANY } = Comparator -const Range = __nccwpck_require__(9828) -const satisfies = __nccwpck_require__(6055) -const gt = __nccwpck_require__(4123) -const lt = __nccwpck_require__(194) -const lte = __nccwpck_require__(7520) -const gte = __nccwpck_require__(5522) + const pool = this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + )) -const outside = (version, range, hilo, options) => { - version = new SemVer(version, options) - range = new Range(range, options) + if (pool) { + this[kRemoveClient](pool) + } - let gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') + return this } - // If it satisfies the range it is not outside - if 
(satisfies(version, range, options)) { - return false + get upstreams () { + return this[kClients] + .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) + .map((p) => p[kUrl].origin) } - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. + [kGetDispatcher] () { + // We validate that pools is greater than 0, + // otherwise we would have to wait until an upstream + // is added, which might never happen. + if (this[kClients].length === 0) { + throw new BalancedPoolMissingUpstreamError() + } - for (let i = 0; i < range.set.length; ++i) { - const comparators = range.set[i] + const dispatcher = this[kClients].find(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) - let high = null - let low = null + if (!dispatcher) { + return + } - comparators.forEach((comparator) => { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) + const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false + if (allClientsBusy) { + return } - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} + let counter = 0 -module.exports = outside + let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) + while (counter++ < this[kClients].length) { + this[kIndex] = (this[kIndex] + 1) % this[kClients].length + const pool = this[kClients][this[kIndex]] -/***/ }), + // find pool index with the largest weight + if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { + maxWeightIndex = this[kIndex] + } -/***/ 5297: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // decrease the current weight every `this[kClients].length`. + if (this[kIndex] === 0) { + // Set the current weight to the next lower weight. + this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] -// given a set of versions and a range, create a "simplified" range -// that includes the same versions that the original range does -// If the original range is shorter than the simplified one, return that. 
-const satisfies = __nccwpck_require__(6055) -const compare = __nccwpck_require__(4309) -module.exports = (versions, range, options) => { - const set = [] - let first = null - let prev = null - const v = versions.sort((a, b) => compare(a, b, options)) - for (const version of v) { - const included = satisfies(version, range, options) - if (included) { - prev = version - if (!first) { - first = version + if (this[kCurrentWeight] <= 0) { + this[kCurrentWeight] = this[kMaxWeightPerServer] + } } - } else { - if (prev) { - set.push([first, prev]) + if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { + return pool } - prev = null - first = null } - } - if (first) { - set.push([first, null]) - } - const ranges = [] - for (const [min, max] of set) { - if (min === max) { - ranges.push(min) - } else if (!max && min === v[0]) { - ranges.push('*') - } else if (!max) { - ranges.push(`>=${min}`) - } else if (min === v[0]) { - ranges.push(`<=${max}`) - } else { - ranges.push(`${min} - ${max}`) - } + this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] + this[kIndex] = maxWeightIndex + return this[kClients][maxWeightIndex] } - const simplified = ranges.join(' || ') - const original = typeof range.raw === 'string' ? range.raw : String(range) - return simplified.length < original.length ? simplified : range } +module.exports = BalancedPool + /***/ }), -/***/ 7863: +/***/ 3616: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const Range = __nccwpck_require__(9828) -const Comparator = __nccwpck_require__(1532) -const { ANY } = Comparator -const satisfies = __nccwpck_require__(6055) -const compare = __nccwpck_require__(4309) - -// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: -// - Every simple range `r1, r2, ...` is a null set, OR -// - Every simple range `r1, r2, ...` which is not a null set is a subset of -// some `R1, R2, ...` -// -// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: -// - If c is only the ANY comparator -// - If C is only the ANY comparator, return true -// - Else if in prerelease mode, return false -// - else replace c with `[>=0.0.0]` -// - If C is only the ANY comparator -// - if in prerelease mode, return true -// - else replace C with `[>=0.0.0]` -// - Let EQ be the set of = comparators in c -// - If EQ is more than one, return true (null set) -// - Let GT be the highest > or >= comparator in c -// - Let LT be the lowest < or <= comparator in c -// - If GT and LT, and GT.semver > LT.semver, return true (null set) -// - If any C is a = range, and GT or LT are set, return false -// - If EQ -// - If GT, and EQ does not satisfy GT, return true (null set) -// - If LT, and EQ does not satisfy LT, return true (null set) -// - If EQ satisfies every C, return true -// - Else return false -// - If GT -// - If GT.semver is lower than any > or >= comp in C, return false -// - If GT is >=, and GT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the GT.semver tuple, return false -// - If LT -// - If LT.semver is greater than any < or <= comp in C, return false -// - If LT is <=, and LT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the LT.semver tuple, return false -// - Else return true - -const subset = (sub, dom, options = {}) => { - if (sub === dom) { - return true - } - - sub = new Range(sub, options) - dom = new 
Range(dom, options) - let sawNonNull = false - OUTER: for (const simpleSub of sub.set) { - for (const simpleDom of dom.set) { - const isSub = simpleSubset(simpleSub, simpleDom, options) - sawNonNull = sawNonNull || isSub !== null - if (isSub) { - continue OUTER - } - } - // the null set is a subset of everything, but null simple ranges in - // a complex range should be ignored. so if we saw a non-null range, - // then we know this isn't a subset, but if EVERY simple range was null, - // then it is a subset. - if (sawNonNull) { - return false - } - } - return true -} -const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] -const minimumVersion = [new Comparator('>=0.0.0')] +const { kConstruct } = __nccwpck_require__(3157) +const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(8671) +const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(825) +const { kHeadersList } = __nccwpck_require__(9271) +const { webidl } = __nccwpck_require__(3226) +const { Response, cloneResponse } = __nccwpck_require__(8111) +const { Request } = __nccwpck_require__(8816) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(8900) +const { fetching } = __nccwpck_require__(2503) +const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(9611) +const assert = __nccwpck_require__(9491) +const { getGlobalDispatcher } = __nccwpck_require__(8552) -const simpleSubset = (sub, dom, options) => { - if (sub === dom) { - return true - } +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation + * @typedef {Object} CacheBatchOperation + * @property {'delete' | 'put'} type + * @property {any} request + * @property {any} response + * @property {import('../../types/cache').CacheQueryOptions} options + */ - if (sub.length === 1 && sub[0].semver === ANY) { - if (dom.length === 1 && dom[0].semver === ANY) { - return true - } else if (options.includePrerelease) { - sub = minimumVersionWithPreRelease - } else { - sub = minimumVersion - } - } +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list + * @typedef {[any, any][]} requestResponseList + */ - if (dom.length === 1 && dom[0].semver === ANY) { - if (options.includePrerelease) { - return true - } else { - dom = minimumVersion - } - } +class Cache { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list + * @type {requestResponseList} + */ + #relevantRequestResponseList - const eqSet = new Set() - let gt, lt - for (const c of sub) { - if (c.operator === '>' || c.operator === '>=') { - gt = higherGT(gt, c, options) - } else if (c.operator === '<' || c.operator === '<=') { - lt = lowerLT(lt, c, options) - } else { - eqSet.add(c.semver) + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() } - } - if (eqSet.size > 1) { - return null + this.#relevantRequestResponseList = arguments[1] } - let gtltComp - if (gt && lt) { - gtltComp = compare(gt.semver, lt.semver, options) - if (gtltComp > 0) { - return null - } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { - return null - } - } + async match (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) - // will iterate one or zero times - for (const eq of eqSet) { - if (gt && !satisfies(eq, String(gt), options)) { - return null - } + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) - if (lt && 
!satisfies(eq, String(lt), options)) { - return null - } + const p = await this.matchAll(request, options) - for (const c of dom) { - if (!satisfies(eq, String(c), options)) { - return false - } + if (p.length === 0) { + return } - return true + return p[0] } - let higher, lower - let hasDomLT, hasDomGT - // if the subset has a prerelease, we need a comparator in the superset - // with the same tuple and a prerelease, or it's not a subset - let needDomLTPre = lt && - !options.includePrerelease && - lt.semver.prerelease.length ? lt.semver : false - let needDomGTPre = gt && - !options.includePrerelease && - gt.semver.prerelease.length ? gt.semver : false - // exception: <1.2.3-0 is the same as <1.2.3 - if (needDomLTPre && needDomLTPre.prerelease.length === 1 && - lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { - needDomLTPre = false - } + async matchAll (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) - for (const c of dom) { - hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' - hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' - if (gt) { - if (needDomGTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomGTPre.major && - c.semver.minor === needDomGTPre.minor && - c.semver.patch === needDomGTPre.patch) { - needDomGTPre = false - } - } - if (c.operator === '>' || c.operator === '>=') { - higher = higherGT(gt, c, options) - if (higher === c && higher !== gt) { - return false + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + // 1. + let r = null + + // 2. + if (request !== undefined) { + if (request instanceof Request) { + // 2.1.1 + r = request[kState] + + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] } - } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { - return false + } else if (typeof request === 'string') { + // 2.2.1 + r = new Request(request)[kState] } } - if (lt) { - if (needDomLTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomLTPre.major && - c.semver.minor === needDomLTPre.minor && - c.semver.patch === needDomLTPre.patch) { - needDomLTPre = false - } + + // 5. + // 5.1 + const responses = [] + + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + responses.push(requestResponse[1]) } - if (c.operator === '<' || c.operator === '<=') { - lower = lowerLT(lt, c, options) - if (lower === c && lower !== lt) { - return false - } - } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { - return false + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + responses.push(requestResponse[1]) } } - if (!c.operator && (lt || gt) && gtltComp !== 0) { - return false + + // 5.4 + // We don't implement CORs so we don't need to loop over the responses, yay! + + // 5.5.1 + const responseList = [] + + // 5.5.2 + for (const response of responses) { + // 5.5.2.1 + const responseObject = new Response(response.body?.source ?? 
null) + const body = responseObject[kState].body + responseObject[kState] = response + responseObject[kState].body = body + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + + responseList.push(responseObject) } - } - // if there was a < or >, and nothing in the dom, then must be false - // UNLESS it was limited by another range in the other direction. - // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 - if (gt && hasDomLT && !lt && gtltComp !== 0) { - return false + // 6. + return Object.freeze(responseList) } - if (lt && hasDomGT && !gt && gtltComp !== 0) { - return false - } + async add (request) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) - // we needed a prerelease range in a specific tuple, but didn't get one - // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, - // because it includes prereleases in the 1.2.3 tuple - if (needDomGTPre || needDomLTPre) { - return false - } + request = webidl.converters.RequestInfo(request) - return true -} + // 1. + const requests = [request] -// >=1.2.3 is lower than >1.2.3 -const higherGT = (a, b, options) => { - if (!a) { - return b - } - const comp = compare(a.semver, b.semver, options) - return comp > 0 ? a - : comp < 0 ? b - : b.operator === '>' && a.operator === '>=' ? b - : a -} + // 2. + const responseArrayPromise = this.addAll(requests) -// <=1.2.3 is higher than <1.2.3 -const lowerLT = (a, b, options) => { - if (!a) { - return b + // 3. + return await responseArrayPromise } - const comp = compare(a.semver, b.semver, options) - return comp < 0 ? a - : comp > 0 ? b - : b.operator === '<' && a.operator === '<=' ? b - : a -} -module.exports = subset + async addAll (requests) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) + requests = webidl.converters['sequence'](requests) -/***/ }), + // 1. + const responsePromises = [] -/***/ 2706: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 2. + const requestList = [] -const Range = __nccwpck_require__(9828) + // 3. + for (const request of requests) { + if (typeof request === 'string') { + continue + } -// Mostly just for testing and legacy API reasons -const toComparators = (range, options) => - new Range(range, options).set - .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + // 3.1 + const r = request[kState] -module.exports = toComparators + // 3.2 + if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme when method is not GET.' + }) + } + } + // 4. + /** @type {ReturnType[]} */ + const fetchControllers = [] -/***/ }), + // 5. + for (const request of requests) { + // 5.1 + const r = new Request(request)[kState] -/***/ 2098: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 5.2 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme.' + }) + } -const Range = __nccwpck_require__(9828) -const validRange = (range, options) => { - try { - // Return '*' instead of '' so that truthiness works. 
- // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} -module.exports = validRange + // 5.4 + r.initiator = 'fetch' + r.destination = 'subresource' + // 5.5 + requestList.push(r) -/***/ }), + // 5.6 + const responsePromise = createDeferredPromise() -/***/ 6062: -/***/ (function() { + // 5.7 + fetchControllers.push(fetching({ + request: r, + dispatcher: getGlobalDispatcher(), + processResponse (response) { + // 1. + if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Received an invalid status code or the request failed.' + })) + } else if (response.headersList.contains('vary')) { // 2. + // 2.1 + const fieldValues = getFieldValues(response.headersList.get('vary')) -(function (global, undefined) { - "use strict"; + // 2.2 + for (const fieldValue of fieldValues) { + // 2.2.1 + if (fieldValue === '*') { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'invalid vary field value' + })) - if (global.setImmediate) { - return; + for (const controller of fetchControllers) { + controller.abort() + } + + return + } + } + } + }, + processResponseEndOfBody (response) { + // 1. + if (response.aborted) { + responsePromise.reject(new DOMException('aborted', 'AbortError')) + return + } + + // 2. + responsePromise.resolve(response) + } + })) + + // 5.8 + responsePromises.push(responsePromise.promise) } - var nextHandle = 1; // Spec says greater than zero - var tasksByHandle = {}; - var currentlyRunningATask = false; - var doc = global.document; - var registerImmediate; + // 6. + const p = Promise.all(responsePromises) - function setImmediate(callback) { - // Callback can either be a function or a string - if (typeof callback !== "function") { - callback = new Function("" + callback); - } - // Copy function arguments - var args = new Array(arguments.length - 1); - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i + 1]; + // 7. + const responses = await p + + // 7.1 + const operations = [] + + // 7.2 + let index = 0 + + // 7.3 + for (const response of responses) { + // 7.3.1 + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 7.3.2 + request: requestList[index], // 7.3.3 + response // 7.3.4 } - // Store and register the task - var task = { callback: callback, args: args }; - tasksByHandle[nextHandle] = task; - registerImmediate(nextHandle); - return nextHandle++; - } - function clearImmediate(handle) { - delete tasksByHandle[handle]; - } + operations.push(operation) // 7.3.5 - function run(task) { - var callback = task.callback; - var args = task.args; - switch (args.length) { - case 0: - callback(); - break; - case 1: - callback(args[0]); - break; - case 2: - callback(args[0], args[1]); - break; - case 3: - callback(args[0], args[1], args[2]); - break; - default: - callback.apply(undefined, args); - break; - } + index++ // 7.3.6 } - function runIfPresent(handle) { - // From the spec: "Wait until any invocations of this algorithm started before this one have completed." - // So if we're currently running a task, we'll need to delay this invocation. - if (currentlyRunningATask) { - // Delay by doing a setTimeout. setImmediate was tried instead, but in Firefox 7 it generated a - // "too much recursion" error. 
- setTimeout(runIfPresent, 0, handle); - } else { - var task = tasksByHandle[handle]; - if (task) { - currentlyRunningATask = true; - try { - run(task); - } finally { - clearImmediate(handle); - currentlyRunningATask = false; - } - } - } - } + // 7.5 + const cacheJobPromise = createDeferredPromise() - function installNextTickImplementation() { - registerImmediate = function(handle) { - process.nextTick(function () { runIfPresent(handle); }); - }; - } + // 7.6.1 + let errorData = null - function canUsePostMessage() { - // The test against `importScripts` prevents this implementation from being installed inside a web worker, - // where `global.postMessage` means something completely different and can't be used for this purpose. - if (global.postMessage && !global.importScripts) { - var postMessageIsAsynchronous = true; - var oldOnMessage = global.onmessage; - global.onmessage = function() { - postMessageIsAsynchronous = false; - }; - global.postMessage("", "*"); - global.onmessage = oldOnMessage; - return postMessageIsAsynchronous; - } + // 7.6.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e } - function installPostMessageImplementation() { - // Installs an event handler on `global` for the `message` event: see - // * https://developer.mozilla.org/en/DOM/window.postMessage - // * http://www.whatwg.org/specs/web-apps/current-work/multipage/comms.html#crossDocumentMessages + // 7.6.3 + queueMicrotask(() => { + // 7.6.3.1 + if (errorData === null) { + cacheJobPromise.resolve(undefined) + } else { + // 7.6.3.2 + cacheJobPromise.reject(errorData) + } + }) - var messagePrefix = "setImmediate$" + Math.random() + "$"; - var onGlobalMessage = function(event) { - if (event.source === global && - typeof event.data === "string" && - event.data.indexOf(messagePrefix) === 0) { - runIfPresent(+event.data.slice(messagePrefix.length)); - } - }; + // 7.7 + return cacheJobPromise.promise + } - if (global.addEventListener) { - global.addEventListener("message", onGlobalMessage, false); - } else { - global.attachEvent("onmessage", onGlobalMessage); - } + async put (request, response) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) - registerImmediate = function(handle) { - global.postMessage(messagePrefix + handle, "*"); - }; - } + request = webidl.converters.RequestInfo(request) + response = webidl.converters.Response(response) - function installMessageChannelImplementation() { - var channel = new MessageChannel(); - channel.port1.onmessage = function(event) { - var handle = event.data; - runIfPresent(handle); - }; + // 1. + let innerRequest = null - registerImmediate = function(handle) { - channel.port2.postMessage(handle); - }; + // 2. + if (request instanceof Request) { + innerRequest = request[kState] + } else { // 3. + innerRequest = new Request(request)[kState] } - function installReadyStateChangeImplementation() { - var html = doc.documentElement; - registerImmediate = function(handle) { - // Create a