diff --git a/.github/actions/build-and-test/action.yml b/.github/actions/build-and-test/action.yml
new file mode 100755
index 000000000..b8a8999b8
--- /dev/null
+++ b/.github/actions/build-and-test/action.yml
@@ -0,0 +1,14 @@
+name: 'build-and-test'
+runs:
+  using: "composite"
+  steps:
+    - run: |
+        npm install -g codecov
+        npm ci
+        npm run build
+        npm test
+      shell: bash
+    # Upload code coverage to CodeCov, but do not fail the CI if CodeCov upload
+    # fails as the service is sometimes flaky.
+    - run: codecov -f ./coverage/coverage-final.json || true
+      shell: bash
diff --git a/.github/workflows/build-and-test.sh b/.github/workflows/build-and-test.sh
deleted file mode 100755
index 37cbe832e..000000000
--- a/.github/workflows/build-and-test.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-set -euxo pipefail
-
-npm install -g codecov
-
-npm ci
-npm run build
-npm test
-
-# Upload code coverage to CodeCov, but do not fail the CI if CodeCov upload
-# fails as the service is sometimes flaky.
-codecov -f ./coverage/coverage-final.json || true
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index fbf6b3502..9aa36464b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -48,7 +48,7 @@ jobs:
       - uses: actions/setup-node@v1
         with:
           node-version: '12.x'
-      - run: .github/workflows/build-and-test.sh
+      - uses: ./.github/actions/build-and-test
       - uses: ros-tooling/setup-ros@0.0.25
         with:
           required-ros-distributions: ${{ matrix.ros_distribution }}
@@ -74,7 +74,7 @@ jobs:
       - uses: actions/setup-node@v1
         with:
           node-version: '12.x'
-      - run: .github/workflows/build-and-test.sh
+      - uses: ./.github/actions/build-and-test
       - uses: ros-tooling/setup-ros@0.0.25
         with:
           required-ros-distributions: ${{ matrix.ros_distribution }}
@@ -105,7 +105,6 @@ jobs:
           target-ros2-distro: foxy
           vcs-repo-file-url: "${{ github.workspace }}/.github/workflows/repo_file_for_test_cpp_with_dependency.repos"
 
-
   test_ros2_docker:
     name: "ROS 2 Docker"
     runs-on: ubuntu-latest
@@ -141,7 +140,7 @@ jobs:
       - uses: actions/setup-node@v1
         with:
           node-version: '12.x'
-      - run: .github/workflows/build-and-test.sh
+      - uses: ./.github/actions/build-and-test
       - uses: ros-tooling/setup-ros@0.0.25
         with:
           required-ros-distributions: ${{ matrix.ros_distribution }}
@@ -174,12 +173,33 @@ jobs:
       matrix:
         os: [macOS-latest, windows-latest, ubuntu-20.04]
         ros_distribution: [foxy, rolling]
+        shell: [bash, pwsh, sh, powershell, cmd]
+        vs-version: [null, 14, 15, 16]
+        exclude:
+          - {os: windows-latest, shell: sh}
+          - {os: macOS-latest, shell: powershell}
+          - {os: macOS-latest, shell: cmd}
+          - {os: macOS-latest, vs-version: 14}
+          - {os: macOS-latest, vs-version: 15}
+          - {os: macOS-latest, vs-version: 16}
+          - {os: ubuntu-20.04, shell: powershell}
+          - {os: ubuntu-20.04, shell: cmd}
+          - {os: ubuntu-20.04, vs-version: 14}
+          - {os: ubuntu-20.04, vs-version: 15}
+          - {os: ubuntu-20.04, vs-version: 16}
+    defaults:
+      run:
+        shell: ${{matrix.shell}}
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-node@v1
         with:
           node-version: '12.x'
-      - run: .github/workflows/build-and-test.sh
+      - if: matrix.vs-version
+        uses: microsoft/setup-msbuild@v1
+        with:
+          vs-version: ${{ matrix.vs-version }}
+      - uses: ./.github/actions/build-and-test
       - uses: ros-tooling/setup-ros@0.0.25
       - uses: ./
         id: test_single_package
@@ -300,7 +320,7 @@ jobs:
       - uses: actions/setup-node@v1
         with:
           node-version: '12.x'
-      - run: .github/workflows/build-and-test.sh
+      - uses: ./.github/actions/build-and-test
       - uses: ros-tooling/setup-ros@0.0.25
         with:
           required-ros-distributions: ${{
matrix.ros_distribution }} diff --git a/dist/index.js b/dist/index.js index 19482be3a..8f5329c15 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,108 +1,56 @@ module.exports = -/******/ (() => { // webpackBootstrap -/******/ var __webpack_modules__ = ({ - -/***/ 351: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { - -"use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __importStar(__webpack_require__(87)); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -function escapeData(s) { - return toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), +/******/ (function(modules, runtime) { // webpackBootstrap +/******/ "use strict"; +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ var threw = true; +/******/ try { +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ threw = false; 
+/******/ } finally { +/******/ if(threw) delete installedModules[moduleId]; +/******/ } +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ __webpack_require__.ab = __dirname + "/"; +/******/ +/******/ // the startup function +/******/ function startup() { +/******/ // Load entry module and return exports +/******/ return __webpack_require__(374); +/******/ }; +/******/ +/******/ // run startup +/******/ return startup(); +/******/ }) +/************************************************************************/ +/******/ ({ -/***/ 186: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +/***/ 1: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -115,930 +63,680 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const command_1 = __webpack_require__(351); -const os = __importStar(__webpack_require__(87)); -const path = __importStar(__webpack_require__(622)); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = command_1.toCommandValue(val); - process.env[name] = convertedVal; - command_1.issueCommand('set-env', { name }, convertedVal); -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; +Object.defineProperty(exports, "__esModule", { value: true }); +const childProcess = __webpack_require__(129); +const path = __webpack_require__(622); +const util_1 = __webpack_require__(669); +const ioUtil = __webpack_require__(672); +const exec = util_1.promisify(childProcess.exec); /** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. 
*/ -function addPath(inputPath) { - command_1.issueCommand('add-path', {}, inputPath); - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); } -exports.addPath = addPath; +exports.cp = cp; /** - * Gets the value of an input. The value is also trimmed. + * Moves a path. * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - return val.trim(); +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); } -exports.getInput = getInput; +exports.mv = mv; /** - * Sets the value of an output. + * Remove a path recursively with force * - * @param name name of the output to set - * @param value value to store. Non-string values will be converted to a string via JSON.stringify + * @param inputPath path to remove */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. 
- * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. Errors will be converted to string via toString() - */ -function error(message) { - command_1.issue('error', message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds an warning issue - * @param message warning issue message. Errors will be converted to string via toString() - */ -function warning(message) { - command_1.issue('warning', message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { +function rmRF(inputPath) { return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); + if (ioUtil.IS_WINDOWS) { + // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another + // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. 
+ try { + if (yield ioUtil.isDirectory(inputPath, true)) { + yield exec(`rd /s /q "${inputPath}"`); + } + else { + yield exec(`del /f /a "${inputPath}"`); + } + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + // Shelling out fails to remove a symlink folder with missing source, this unlink catches that + try { + yield ioUtil.unlink(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } } - finally { - endGroup(); + else { + let isDir = false; + try { + isDir = yield ioUtil.isDirectory(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + return; + } + if (isDir) { + yield exec(`rm -rf "${inputPath}"`); + } + else { + yield ioUtil.unlink(inputPath); + } } - return result; }); } -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- +exports.rmRF = rmRF; /** - * Saves state for current action, the state can only be retrieved by this action's post job execution. + * Make a directory. Creates the full path with folders in between + * Will throw if it fails * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify + * @param fsPath path to create + * @returns Promise */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + yield ioUtil.mkdirP(fsPath); + }); } -exports.saveState = saveState; +exports.mkdirP = mkdirP; /** - * Gets the value of an state set by this action's main execution. + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 159: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __importStar(__webpack_require__(87)); -const events = __importStar(__webpack_require__(614)); -const child = __importStar(__webpack_require__(129)); -const path = __importStar(__webpack_require__(622)); -const io = __importStar(__webpack_require__(436)); -const ioUtil = __importStar(__webpack_require__(962)); -/* eslint-disable @typescript-eslint/unbound-method */ -const IS_WINDOWS = process.platform === 'win32'; -/* - * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool */ -class ToolRunner extends events.EventEmitter { - constructor(toolPath, args, options) { - super(); - if (!toolPath) { - throw new Error("Parameter 'toolPath' cannot be null or empty."); - } - this.toolPath = toolPath; - this.args = args || []; - this.options = options || {}; - } - _debug(message) { - if (this.options.listeners && this.options.listeners.debug) { - this.options.listeners.debug(message); +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); } - } - _getCommandString(options, noPrefix) { - const toolPath = this._getSpawnFileName(); - const args = this._getSpawnArgs(options); - let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool - if (IS_WINDOWS) { - // Windows + cmd file - if (this._isCmdFile()) { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); } - } - // Windows + verbatim - else if (options.windowsVerbatimArguments) { - cmd += `"${toolPath}"`; - for (const a of args) { - cmd += ` ${a}`; + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } } - // Windows (regular) - else { - cmd += this._windowsQuoteCmdArg(toolPath); - for (const a of args) { - cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + try { + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { + for (const extension of process.env.PATHEXT.split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } } } - } - else { - // OSX/Linux - this can likely be improved with some form of quoting. - // creating processes on Unix is fundamentally different than Windows. - // on Unix, execvp() takes an arg array. 
- cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return filePath; + } + return ''; } - } - return cmd; - } - _processLineBuffer(data, strBuffer, onLine) { - try { - let s = strBuffer + data.toString(); - let n = s.indexOf(os.EOL); - while (n > -1) { - const line = s.substring(0, n); - onLine(line); - // the rest of the string ... - s = s.substring(n + os.EOL.length); - n = s.indexOf(os.EOL); + // if any path separators, return empty + if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { + return ''; } - strBuffer = s; - } - catch (err) { - // streaming lines to console is best effort. Don't fail a build. - this._debug(`error processing line. Failed with error ${err}`); - } - } - _getSpawnFileName() { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - return process.env['COMSPEC'] || 'cmd.exe'; + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } } - } - return this.toolPath; - } - _getSpawnArgs(options) { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; - for (const a of this.args) { - argline += ' '; - argline += options.windowsVerbatimArguments - ? a - : this._windowsQuoteCmdArg(a); + // return the first match + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); + if (filePath) { + return filePath; } - argline += '"'; - return [argline]; } + return ''; } - return this.args; - } - _endsWith(str, end) { - return str.endsWith(end); - } - _isCmdFile() { - const upperToolPath = this.toolPath.toUpperCase(); - return (this._endsWith(upperToolPath, '.CMD') || - this._endsWith(upperToolPath, '.BAT')); - } - _windowsQuoteCmdArg(arg) { - // for .exe, apply the normal quoting rules that libuv applies - if (!this._isCmdFile()) { - return this._uvQuoteCmdArg(arg); - } - // otherwise apply quoting rules specific to the cmd.exe command line parser. - // the libuv rules are generic and are not designed specifically for cmd.exe - // command line parser. - // - // for a detailed description of the cmd.exe command line parser, refer to - // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 - // need quotes for empty arg - if (!arg) { - return '""'; + catch (err) { + throw new Error(`which failed with message ${err.message}`); } - // determine whether the arg needs to be quoted - const cmdSpecialChars = [ - ' ', - '\t', - '&', - '(', - ')', - '[', - ']', - '{', - '}', - '^', - '=', - ';', - '!', - "'", - '+', - ',', - '`', - '~', - '|', - '<', - '>', - '"' - ]; - let needsQuotes = false; - for (const char of arg) { - if (cmdSpecialChars.some(x => x === char)) { - needsQuotes = true; - break; + }); +} +exports.which = which; +function readCopyOptions(options) { + const force = options.force == null ? 
true : options.force; + const recursive = Boolean(options.recursive); + return { force, recursive }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); } - } - // short-circuit if quotes not needed - if (!needsQuotes) { - return arg; - } - // the following quoting rules are very similar to the rules that by libuv applies. - // - // 1) wrap the string in quotes - // - // 2) double-up quotes - i.e. " => "" - // - // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately - // doesn't work well with a cmd.exe command line. - // - // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. - // for example, the command line: - // foo.exe "myarg:""my val""" - // is parsed by a .NET console app into an arg array: - // [ "myarg:\"my val\"" ] - // which is the same end result when applying libuv quoting rules. although the actual - // command line from libuv quoting rules would look like: - // foo.exe "myarg:\"my val\"" - // - // 3) double-up slashes that precede a quote, - // e.g. hello \world => "hello \world" - // hello\"world => "hello\\""world" - // hello\\"world => "hello\\\\""world" - // hello world\ => "hello world\\" - // - // technically this is not required for a cmd.exe command line, or the batch argument parser. - // the reasons for including this as a .cmd quoting rule are: - // - // a) this is optimized for the scenario where the argument is passed from the .cmd file to an - // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. - // - // b) it's what we've been doing previously (by deferring to node default behavior) and we - // haven't heard any complaints about that aspect. - // - // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be - // escaped when used on the command line directly - even though within a .cmd file % can be escaped - // by using %%. - // - // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts - // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. - // - // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would - // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the - // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args - // to an external program. - // - // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. - // % can be escaped within a .cmd file. 
- let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; // double the slash + else { + yield copyFile(srcFile, destFile, force); } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '"'; // double the quote + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); } - else { - quoteHit = false; + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _uvQuoteCmdArg(arg) { - // Tool runner wraps child_process.spawn() and needs to apply the same quoting as - // Node in certain cases where the undocumented spawn option windowsVerbatimArguments - // is used. - // - // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, - // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), - // pasting copyright notice from Node within this function: - // - // Copyright Joyent, Inc. and other Node contributors. All rights reserved. - // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to - // deal in the Software without restriction, including without limitation the - // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - // sell copies of the Software, and to permit persons to whom the Software is - // furnished to do so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in - // all copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - // IN THE SOFTWARE. - if (!arg) { - // Need double quotation for empty argument - return '""'; - } - if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { - // No quotation needed - return arg; - } - if (!arg.includes('"') && !arg.includes('\\')) { - // No embedded double quotes or backslashes, so I can just wrap - // quote marks around the whole thing. 
- return `"${arg}"`; - } - // Expected input/output: - // input : hello"world - // output: "hello\"world" - // input : hello""world - // output: "hello\"\"world" - // input : hello\world - // output: hello\world - // input : hello\\world - // output: hello\\world - // input : hello\"world - // output: "hello\\\"world" - // input : hello\\"world - // output: "hello\\\\\"world" - // input : hello world\ - // output: "hello world\\" - note the comment in libuv actually reads "hello world\" - // but it appears the comment is wrong, it should be "hello world\\" - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '\\'; - } - else { - quoteHit = false; - } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _cloneExecOptions(options) { - options = options || {}; - const result = { - cwd: options.cwd || process.cwd(), - env: options.env || process.env, - silent: options.silent || false, - windowsVerbatimArguments: options.windowsVerbatimArguments || false, - failOnStdErr: options.failOnStdErr || false, - ignoreReturnCode: options.ignoreReturnCode || false, - delay: options.delay || 10000 - }; - result.outStream = options.outStream || process.stdout; - result.errStream = options.errStream || process.stderr; - return result; + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), + +/***/ 2: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +const os = __webpack_require__(87); +const macosRelease = __webpack_require__(118); +const winRelease = __webpack_require__(49); + +const osName = (platform, release) => { + if (!platform && release) { + throw new Error('You can\'t specify a `release` without specifying `platform`'); + } + + platform = platform || os.platform(); + + let id; + + if (platform === 'darwin') { + if (!release && os.platform() === 'darwin') { + release = os.release(); + } + + const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; + id = release ? macosRelease(release).name : ''; + return prefix + (id ? ' ' + id : ''); + } + + if (platform === 'linux') { + if (!release && os.platform() === 'linux') { + release = os.release(); + } + + id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; + return 'Linux' + (id ? ' ' + id : ''); + } + + if (platform === 'win32') { + if (!release && os.platform() === 'win32') { + release = os.release(); + } + + id = release ? winRelease(release) : ''; + return 'Windows' + (id ? 
' ' + id : ''); + } + + return platform; +}; + +module.exports = osName; + + +/***/ }), + +/***/ 9: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var once = __webpack_require__(969); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 11: +/***/ (function(module) { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. 
+module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] } - _getSpawnOptions(options, toolPath) { - options = options || {}; - const result = {}; - result.cwd = options.cwd; - result.env = options.env; - result['windowsVerbatimArguments'] = - options.windowsVerbatimArguments || this._isCmdFile(); - if (options.windowsVerbatimArguments) { - result.argv0 = `"${toolPath}"`; - } - return result; - } - /** - * Exec a tool. - * Output will be streamed to the live console. - * Returns promise with return code - * - * @param tool path to tool to exec - * @param options optional exec options. See ExecOptions - * @returns number - */ - exec() { - return __awaiter(this, void 0, void 0, function* () { - // root the tool path if it is unrooted and contains relative pathing - if (!ioUtil.isRooted(this.toolPath) && - (this.toolPath.includes('/') || - (IS_WINDOWS && this.toolPath.includes('\\')))) { - // prefer options.cwd if it is specified, however options.cwd may also need to be rooted - this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); - } - // if the tool is only a file name, then resolve it from the PATH - // otherwise verify it exists (add extension on Windows if necessary) - this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => { - this._debug(`exec tool: ${this.toolPath}`); - this._debug('arguments:'); - for (const arg of this.args) { - this._debug(` ${arg}`); - } - const optionsNonNull = this._cloneExecOptions(this.options); - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); - } - const state = new ExecState(optionsNonNull, this.toolPath); - state.on('debug', (message) => { - this._debug(message); - }); - const fileName = this._getSpawnFileName(); - const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - const stdbuffer = ''; - if (cp.stdout) { - cp.stdout.on('data', (data) => { - if (this.options.listeners && this.options.listeners.stdout) { - this.options.listeners.stdout(data); - } - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(data); - } - this._processLineBuffer(data, stdbuffer, (line) => { - if (this.options.listeners && this.options.listeners.stdline) { - this.options.listeners.stdline(line); - } - }); - }); - } - const errbuffer = ''; - if (cp.stderr) { - cp.stderr.on('data', (data) => { - state.processStderr = true; - if (this.options.listeners && this.options.listeners.stderr) { - this.options.listeners.stderr(data); - } - if (!optionsNonNull.silent && - optionsNonNull.errStream && - optionsNonNull.outStream) { - const s = optionsNonNull.failOnStdErr - ? 
optionsNonNull.errStream - : optionsNonNull.outStream; - s.write(data); - } - this._processLineBuffer(data, errbuffer, (line) => { - if (this.options.listeners && this.options.listeners.errline) { - this.options.listeners.errline(line); - } - }); - }); - } - cp.on('error', (err) => { - state.processError = err.message; - state.processExited = true; - state.processClosed = true; - state.CheckComplete(); - }); - cp.on('exit', (code) => { - state.processExitCode = code; - state.processExited = true; - this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); - state.CheckComplete(); - }); - cp.on('close', (code) => { - state.processExitCode = code; - state.processExited = true; - state.processClosed = true; - this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); - state.CheckComplete(); - }); - state.on('done', (error, exitCode) => { - if (stdbuffer.length > 0) { - this.emit('stdline', stdbuffer); - } - if (errbuffer.length > 0) { - this.emit('errline', errbuffer); - } - cp.removeAllListeners(); - if (error) { - reject(error); - } - else { - resolve(exitCode); - } - }); - if (this.options.input) { - if (!cp.stdin) { - throw new Error('child process missing stdin'); - } - cp.stdin.end(this.options.input); - } - }); - }); - } -} -exports.ToolRunner = ToolRunner; -/** - * Convert an arg string to an array of args. Handles escaping - * - * @param argString string of arguments - * @returns string[] array of arguments - */ -function argStringToArray(argString) { - const args = []; - let inQuotes = false; - let escaped = false; - let arg = ''; - function append(c) { - // we only escape double quotes. - if (escaped && c !== '"') { - arg += '\\'; - } - arg += c; - escaped = false; - } - for (let i = 0; i < argString.length; i++) { - const c = argString.charAt(i); - if (c === '"') { - if (!escaped) { - inQuotes = !inQuotes; - } - else { - append(c); - } - continue; - } - if (c === '\\' && escaped) { - append(c); - continue; - } - if (c === '\\' && inQuotes) { - escaped = true; - continue; - } - if (c === ' ' && !inQuotes) { - if (arg.length > 0) { - args.push(arg); - arg = ''; - } - continue; - } - append(c); - } - if (arg.length > 0) { - args.push(arg.trim()); - } - return args; -} -exports.argStringToArray = argStringToArray; -class ExecState extends events.EventEmitter { - constructor(options, toolPath) { - super(); - this.processClosed = false; // tracks whether the process has exited and stdio is closed - this.processError = ''; - this.processExitCode = 0; - this.processExited = false; // tracks whether the process has exited - this.processStderr = false; // tracks whether stderr was written to - this.delay = 10000; // 10 seconds - this.done = false; - this.timeout = null; - if (!toolPath) { - throw new Error('toolPath must not be empty'); - } - this.options = options; - this.toolPath = toolPath; - if (options.delay) { - this.delay = options.delay; - } - } - CheckComplete() { - if (this.done) { - return; - } - if (this.processClosed) { - this._setResult(); - } - else if (this.processExited) { - this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); - } - } - _debug(message) { - this.emit('debug', message); - } - _setResult() { - // determine whether there is an error - let error; - if (this.processExited) { - if (this.processError) { - error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); - } - else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); - } - else if (this.processStderr && this.options.failOnStdErr) { - error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); - } - } - // clear the timeout - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = null; - } - this.done = true; - this.emit('done', error, this.processExitCode); - } - static HandleTimeout(state) { - if (state.done) { - return; - } - if (!state.processClosed && state.processExited) { - const message = `The STDIO streams did not close within ${state.delay / - 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; - state._debug(message); - } - state._setResult(); + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) } + return ret + } } -//# sourceMappingURL=toolrunner.js.map + + +/***/ }), + +/***/ 16: +/***/ (function(module) { + +module.exports = require("tls"); /***/ }), -/***/ 53: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 18: +/***/ (function(module) { -"use strict"; +module.exports = eval("require")("encoding"); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __webpack_require__(747); -const os_1 = __webpack_require__(87); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map /***/ }), -/***/ 438: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +/***/ 20: +/***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); + +const cp = __webpack_require__(129); +const parse = __webpack_require__(568); +const enoent = __webpack_require__(881); + +function spawn(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + + // Hook into child process "exit" event to emit an error if the command + // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + enoent.hookChildProcess(spawned, parsed); + + return spawned; +} + +function spawnSync(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + + // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); + return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__webpack_require__(53)); -const utils_1 = __webpack_require__(30); -exports.context = new Context.Context(); -/** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokit(token, options) { - return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); } -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map + +module.exports = spawn; +module.exports.spawn = spawn; +module.exports.sync = spawnSync; + +module.exports._parse = parse; +module.exports._enoent = enoent; + + +/***/ }), + +/***/ 39: +/***/ (function(module) { + +"use strict"; + +module.exports = opts => { + opts = opts || {}; + + const env = opts.env || process.env; + const platform = opts.platform || process.platform; + + if (platform !== 'win32') { + return 'PATH'; + } + + return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; +}; + + +/***/ }), + +/***/ 49: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +const os = __webpack_require__(87); +const execa = __webpack_require__(955); + +// Reference: https://www.gaijin.at/en/lstwinver.php +const names = new Map([ + ['10.0', '10'], + ['6.3', '8.1'], + ['6.2', '8'], + ['6.1', '7'], + ['6.0', 'Vista'], + ['5.2', 'Server 2003'], + ['5.1', 'XP'], + ['5.0', '2000'], + ['4.9', 'ME'], + ['4.1', '98'], + ['4.0', '95'] +]); + +const windowsRelease = release => { + const version = /\d+\.\d/.exec(release || os.release()); + + if (release && !version) { + throw new Error('`release` argument doesn\'t match `n.n`'); + } + + const ver = (version || [])[0]; + 
+ // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime. + // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version + // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx + // If `wmic` is obsoloete (later versions of Windows 10), use PowerShell instead. + // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name. + if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) { + let stdout; + try { + stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || ''; + } catch (_) { + stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || ''; + } + + const year = (stdout.match(/2008|2012|2016|2019/) || [])[0]; + + if (year) { + return `Server ${year}`; + } + } + + return names.get(ver); +}; + +module.exports = windowsRelease; + + +/***/ }), + +/***/ 87: +/***/ (function(module) { + +module.exports = require("os"); + +/***/ }), + +/***/ 118: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +const os = __webpack_require__(87); + +const nameMap = new Map([ + [20, ['Big Sur', '11']], + [19, ['Catalina', '10.15']], + [18, ['Mojave', '10.14']], + [17, ['High Sierra', '10.13']], + [16, ['Sierra', '10.12']], + [15, ['El Capitan', '10.11']], + [14, ['Yosemite', '10.10']], + [13, ['Mavericks', '10.9']], + [12, ['Mountain Lion', '10.8']], + [11, ['Lion', '10.7']], + [10, ['Snow Leopard', '10.6']], + [9, ['Leopard', '10.5']], + [8, ['Tiger', '10.4']], + [7, ['Panther', '10.3']], + [6, ['Jaguar', '10.2']], + [5, ['Puma', '10.1']] +]); + +const macosRelease = release => { + release = Number((release || os.release()).split('.')[0]); + + const [name, version] = nameMap.get(release); + + return { + name, + version + }; +}; + +module.exports = macosRelease; +// TODO: remove this in the next major version +module.exports.default = macosRelease; + /***/ }), -/***/ 914: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +/***/ 127: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -1061,9 +759,9 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); +Object.defineProperty(exports, "__esModule", { value: true }); exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__webpack_require__(925)); +const httpClient = __importStar(__webpack_require__(539)); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); @@ -1087,6220 +785,10088 @@ exports.getApiBaseUrl = getApiBaseUrl; /***/ }), -/***/ 30: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { - -"use strict"; +/***/ 129: +/***/ (function(module) { -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.context = void 0; -const Context = __importStar(__webpack_require__(53)); -const Utils = __importStar(__webpack_require__(914)); -// octokit + plugins -const core_1 = __webpack_require__(762); -const plugin_rest_endpoint_methods_1 = __webpack_require__(44); -const plugin_paginate_rest_1 = __webpack_require__(193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -const defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } -}; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); -/** - * Convience function to correctly format Octokit Options to pass into the constructor. - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; -} -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map +module.exports = require("child_process"); /***/ }), -/***/ 925: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 141: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(835); -const http = __webpack_require__(605); -const https = __webpack_require__(211); -const pm = __webpack_require__(443); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 
410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; + +var net = __webpack_require__(631); +var tls = __webpack_require__(16); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var events = __webpack_require__(614); +var assert = __webpack_require__(357); +var util = __webpack_require__(669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; } -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); - } + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = url.parse(requestUrl); - return parsedUrl.protocol === 'https:'; + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; } -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != 
null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - } - get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } } - del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); } - post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); } - patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port } - put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. 
+ process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; } - head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise - */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 145: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +const pump = __webpack_require__(453); +const bufferStream = __webpack_require__(966); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + options = Object.assign({maxBuffer: Infinity}, options); + + const {maxBuffer} = options; + + let stream; + return new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); + } + reject(error); + }; + + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } + + resolve(); + }); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }).then(() => stream.getBufferedValue()); +} + +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); +module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); +module.exports.MaxBufferError = MaxBufferError; + + +/***/ }), + +/***/ 168: +/***/ (function(module) { + +"use strict"; + +const alias = ['stdin', 'stdout', 'stderr']; + +const hasAlias = opts => alias.some(x => Boolean(opts[x])); + +module.exports = opts => { + if (!opts) { + return null; + } + + if (opts.stdio && hasAlias(opts)) { + throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); + } + + if (typeof opts.stdio === 'string') { + return opts.stdio; + } + + const stdio = opts.stdio || []; + + if (!Array.isArray(stdio)) { + throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + } + + const result = []; + const len = Math.max(stdio.length, alias.length); + + for (let i = 0; i < len; i++) { + let value = null; + + if (stdio[i] !== undefined) { + value = stdio[i]; + } else if (opts[alias[i]] !== undefined) { + value 
= opts[alias[i]]; + } + + result[i] = value; + } + + return result; +}; + + +/***/ }), + +/***/ 197: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = isexe +isexe.sync = sync + +var fs = __webpack_require__(747) + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), options) +} + +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} + +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid + + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? + options.gid : process.getgid && process.getgid() + + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g + + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 + + return ret +} + + +/***/ }), + +/***/ 211: +/***/ (function(module) { + +module.exports = require("https"); + +/***/ }), + +/***/ 260: +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +var assert = __webpack_require__(357) +var signals = __webpack_require__(654) +var isWin = /^win/i.test(process.platform) + +var EE = __webpack_require__(614) +/* istanbul ignore if */ +if (typeof EE !== 'function') { + EE = EE.EventEmitter +} + +var emitter +if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ +} else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} +} + +// Because this emitter is a global, we have to check to see if a +// previous version of this library failed to enable infinite listeners. +// I know what you're about to say. But literally everything about +// signal-exit is a compromise with evil. Get used to it. 
+if (!emitter.infinite) { + emitter.setMaxListeners(Infinity) + emitter.infinite = true +} + +module.exports = function (cb, opts) { + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + + if (loaded === false) { + load() + } + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + } + emitter.on(ev, cb) + + return remove +} + +module.exports.unload = unload +function unload () { + if (!loaded) { + return + } + loaded = false + + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 +} + +function emit (event, code, signal) { + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) +} + +// { : , ... } +var sigListeners = {} +signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. + var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + if (isWin && sig === 'SIGHUP') { + // "SIGHUP" throws an `ENOSYS` error on Windows, + // so use a supported signal instead + sig = 'SIGINT' + } + process.kill(process.pid, sig) } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + } +}) + +module.exports.signals = function () { + return signals +} + +module.exports.load = load + +var loaded = false + +function load () { + if (loaded) { + return + } + loaded = true + + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. 
+ emitter.count += 1 + + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) + + process.emit = processEmit + process.reallyExit = processReallyExit +} + +var originalProcessReallyExit = process.reallyExit +function processReallyExit (code) { + process.exitCode = code || 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) +} + +var originalProcessEmit = process.emit +function processEmit (ev, arg) { + if (ev === 'exit') { + if (arg !== undefined) { + process.exitCode = arg } + var ret = originalProcessEmit.apply(this, arguments) + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } +} + + +/***/ }), + +/***/ 262: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Context = void 0; +const fs_1 = __webpack_require__(747); +const os_1 = __webpack_require__(87); +class Context { /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch + * Hydrate the context from the environment */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = url.parse(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. 
- await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; + constructor() { + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); } } - return response; - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); - } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); - }); + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - let handleResult = (err, res) => { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); - }); - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error('Request timeout: ' + info.options.path), null); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err, null); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; } - else { - req.end(); + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - let parsedUrl = url.parse(serverUrl); - return this._getAgent(parsedUrl); +} +exports.Context = Context; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 280: +/***/ (function(module, exports) { + +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var R = 0 + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. 
+ +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')' + +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' + +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' + +src[FULL] = '^' + FULLPLAIN + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' + +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' + +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' + +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' + +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?' 
+ +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' + +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +var tildeTrimReplace = '$1~' + +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' + +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +var caretTrimReplace = '$1^' + +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' + +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? 
parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - this.handlers.forEach(handler => { - handler.prepareRequest(info.options); - }); - } - return info; + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? re[LOOSE] : re[FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); - } - return lowercaseKeys(headers || {}); + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num } - return additionalHeaders[header] || clientHeader || _default; + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) } - _getAgent(parsedUrl) { - let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. 
+ // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } } - if (this._keepAlive && !useProxy) { - agent = this._agent; + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) } - // if agent is already assigned use that agent. - if (!!agent) { - return agent; + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (!!this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __webpack_require__(294); - } - const agentOptions = { - maxSockets: maxSockets, - keepAlive: this._keepAlive, - proxy: { - proxyAuth: proxyUrl.auth, - host: proxyUrl.hostname, - port: proxyUrl.port - } - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new Error(msg); - // attach statusCode and body obj (if available) to the error object - err['statusCode'] = statusCode; - if (response.result) { - err['result'] = response.result; - } - reject(err); - } - else { - resolve(response); - } - }); + } } + return defaultResult // may be undefined + } } -exports.HttpClient = HttpClient; +exports.compareIdentifiers = compareIdentifiers -/***/ }), +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) -/***/ 443: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + if (anum && bnum) { + a = +a + b = +b + } -"use strict"; + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(835); -function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; - if (checkBypass(reqUrl)) { - return proxyUrl; - } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = url.parse(proxyVar); - } - return proxyUrl; +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) } -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major } -exports.checkBypass = checkBypass; +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} -/***/ }), +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} -/***/ 962: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} -"use strict"; +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var _a; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const assert_1 = __webpack_require__(357); -const fs = __webpack_require__(747); -const path = __webpack_require__(622); -_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; -exports.IS_WINDOWS = process.platform === 'win32'; -function exists(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exports.stat(fsPath); - } - catch (err) { - if (err.code === 'ENOENT') { - return false; - } - throw err; - } - return true; - }); +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) } -exports.exists = exists; -function isDirectory(fsPath, useStat = false) { - return __awaiter(this, void 0, void 0, function* () { - const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); - return stats.isDirectory(); - }); + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) + }) } -exports.isDirectory = isDirectory; -/** - * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: - * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). - */ -function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); - } - if (exports.IS_WINDOWS) { - return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello - ); // e.g. C: or C:\hello - } - return p.startsWith('/'); + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) + }) } -exports.isRooted = isRooted; -/** - * Recursively create a directory at `fsPath`. - * - * This implementation is optimistic, meaning it attempts to create the full - * path first, and backs up the path stack from there. 
- * - * @param fsPath The path to create - * @param maxDepth The maximum recursion depth - * @param depth The current recursion depth - */ -function mkdirP(fsPath, maxDepth = 1000, depth = 1) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - fsPath = path.resolve(fsPath); - if (depth >= maxDepth) - return exports.mkdir(fsPath); - try { - yield exports.mkdir(fsPath); - return; - } - catch (err) { - switch (err.code) { - case 'ENOENT': { - yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); - yield exports.mkdir(fsPath); - return; - } - default: { - let stats; - try { - stats = yield exports.stat(fsPath); - } - catch (err2) { - throw err; - } - if (!stats.isDirectory()) - throw err; - } - } - } - }); + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 } -exports.mkdirP = mkdirP; -/** - * Best effort attempt to determine whether a file exists and is executable. - * @param filePath file path to check - * @param extensions additional file extensions to try - * @return if file exists and is executable, returns the file path. otherwise empty string. - */ -function tryGetExecutablePath(filePath, extensions) { - return __awaiter(this, void 0, void 0, function* () { - let stats = undefined; - try { - // test file exists - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // on Windows, test for valid extension - const upperExt = path.extname(filePath).toUpperCase(); - if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - // try each extension - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = undefined; - try { - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // preserve the case of the actual file (since an extension was appended) - try { - const directory = path.dirname(filePath); - const upperName = path.basename(filePath).toUpperCase(); - for (const actualName of yield exports.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName); - break; - } - } - } - catch (err) { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); - } - return filePath; - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - } - return ''; - }); + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 } -exports.tryGetExecutablePath = tryGetExecutablePath; -function normalizeSeparators(p) { - p = p || ''; - if (exports.IS_WINDOWS) { - // convert slashes on Windows - p = p.replace(/\//g, '\\'); - // remove redundant slashes - return p.replace(/\\\\+/g, '\\'); - } - // remove redundant slashes - return p.replace(/\/\/+/g, '/'); + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 } -// 
on Mac/Linux, test the execute bit -// R W X R W X R W X -// 256 128 64 32 16 8 4 2 1 -function isUnixExecutable(stats) { - return ((stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid())); + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 } -//# sourceMappingURL=io-util.js.map -/***/ }), +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} -/***/ 436: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} -"use strict"; +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const childProcess = __webpack_require__(129); -const path = __webpack_require__(622); -const util_1 = __webpack_require__(669); -const ioUtil = __webpack_require__(962); -const exec = util_1.promisify(childProcess.exec); -/** - * Copies a file or folder. - * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js - * - * @param source source path - * @param dest destination path - * @param options optional. See CopyOptions. - */ -function cp(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - const { force, recursive } = readCopyOptions(options); - const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; - // Dest is an existing file, but not forcing - if (destStat && destStat.isFile() && !force) { - return; - } - // If dest is an existing directory, should copy inside. - const newDest = destStat && destStat.isDirectory() - ? path.join(dest, path.basename(source)) - : dest; - if (!(yield ioUtil.exists(source))) { - throw new Error(`no such file or directory: ${source}`); - } - const sourceStat = yield ioUtil.stat(source); - if (sourceStat.isDirectory()) { - if (!recursive) { - throw new Error(`Failed to copy. 
${source} is a directory, but tried to copy without recursive flag.`); - } - else { - yield cpDirRecursive(source, newDest, 0, force); - } - } - else { - if (path.relative(source, newDest) === '') { - // a file cannot be copied to itself - throw new Error(`'${newDest}' and '${source}' are the same file`); - } - yield copyFile(source, newDest, force); - } - }); + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } } -exports.cp = cp; -/** - * Moves a path. - * - * @param source source path - * @param dest destination path - * @param options optional. See MoveOptions. - */ -function mv(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - if (yield ioUtil.exists(dest)) { - let destExists = true; - if (yield ioUtil.isDirectory(dest)) { - // If dest is directory copy src into dest - dest = path.join(dest, path.basename(source)); - destExists = yield ioUtil.exists(dest); - } - if (destExists) { - if (options.force == null || options.force) { - yield rmRF(dest); - } - else { - throw new Error('Destination already exists'); - } - } - } - yield mkdirP(path.dirname(dest)); - yield ioUtil.rename(source, dest); - }); + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) } -exports.mv = mv; -/** - * Remove a path recursively with force - * - * @param inputPath path to remove - */ -function rmRF(inputPath) { - return __awaiter(this, void 0, void 0, function* () { - if (ioUtil.IS_WINDOWS) { - // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another - // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. 
- try { - if (yield ioUtil.isDirectory(inputPath, true)) { - yield exec(`rd /s /q "${inputPath}"`); - } - else { - yield exec(`del /f /a "${inputPath}"`); - } - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - } - // Shelling out fails to remove a symlink folder with missing source, this unlink catches that - try { - yield ioUtil.unlink(inputPath); - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - } - } - else { - let isDir = false; - try { - isDir = yield ioUtil.isDirectory(inputPath); - } - catch (err) { - // if you try to delete a file that doesn't exist, desired result is achieved - // other errors are valid - if (err.code !== 'ENOENT') - throw err; - return; - } - if (isDir) { - yield exec(`rm -rf "${inputPath}"`); - } - else { - yield ioUtil.unlink(inputPath); - } - } - }); -} -exports.rmRF = rmRF; -/** - * Make a directory. Creates the full path with folders in between - * Will throw if it fails - * - * @param fsPath path to create - * @returns Promise - */ -function mkdirP(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - yield ioUtil.mkdirP(fsPath); - }); -} -exports.mkdirP = mkdirP; -/** - * Returns path of a tool had the tool actually been invoked. Resolves via paths. - * If you check and the tool does not exist, it will throw. - * - * @param tool name of the tool - * @param check whether to check if tool exists - * @returns Promise path to tool - */ -function which(tool, check) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - // recursive when check=true - if (check) { - const result = yield which(tool, false); - if (!result) { - if (ioUtil.IS_WINDOWS) { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); - } - else { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); - } - } - } - try { - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { - for (const extension of process.env.PATHEXT.split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return filePath; - } - return ''; - } - // if any path separators, return empty - if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { - return ''; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. 
- const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } - } - // return the first match - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); - if (filePath) { - return filePath; - } - } - return ''; - } - catch (err) { - throw new Error(`which failed with message ${err.message}`); - } - }); -} -exports.which = which; -function readCopyOptions(options) { - const force = options.force == null ? true : options.force; - const recursive = Boolean(options.recursive); - return { force, recursive }; -} -function cpDirRecursive(sourceDir, destDir, currentDepth, force) { - return __awaiter(this, void 0, void 0, function* () { - // Ensure there is not a run away recursive copy - if (currentDepth >= 255) - return; - currentDepth++; - yield mkdirP(destDir); - const files = yield ioUtil.readdir(sourceDir); - for (const fileName of files) { - const srcFile = `${sourceDir}/${fileName}`; - const destFile = `${destDir}/${fileName}`; - const srcFileStat = yield ioUtil.lstat(srcFile); - if (srcFileStat.isDirectory()) { - // Recurse - yield cpDirRecursive(srcFile, destFile, currentDepth, force); - } - else { - yield copyFile(srcFile, destFile, force); - } - } - // Change the mode for the newly created directory - yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); - }); + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. + if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } } -// Buffered file copy -function copyFile(srcFile, destFile, force) { - return __awaiter(this, void 0, void 0, function* () { - if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { - // unlink/re-link it - try { - yield ioUtil.lstat(destFile); - yield ioUtil.unlink(destFile); - } - catch (e) { - // Try to override file permission - if (e.code === 'EPERM') { - yield ioUtil.chmod(destFile, '0666'); - yield ioUtil.unlink(destFile); - } - // other errors = it doesn't exist, no work to do - } - // Copy over symlink - const symlinkFull = yield ioUtil.readlink(srcFile); - yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); - } - else if (!(yield ioUtil.exists(destFile)) || force) { - yield ioUtil.copyFile(srcFile, destFile); - } - }); + +Comparator.prototype.toString = function () { + return this.value } -//# sourceMappingURL=io.js.map -/***/ }), +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { + if (this.semver === ANY) { + return true + } -"use strict"; + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + return cmp(version, this.operator, this.semver, this.options) +} -Object.defineProperty(exports, "__esModule", ({ value: true })); +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } -async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? 
"installation" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; + var rangeTmp + + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) } - return `token ${token}`; -} + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + if (range instanceof Comparator) { + return new Range(range.value, options) } - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + if (!(this instanceof Range)) { + return new Range(range, options) } - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + this.format() +} -/***/ 
}), +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} -/***/ 762: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +Range.prototype.toString = function () { + return this.range +} -"use strict"; +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[COMPARATORTRIM]) + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace) -Object.defineProperty(exports, "__esModule", ({ value: true })); + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace) -var universalUserAgent = __webpack_require__(429); -var beforeAfterHook = __webpack_require__(682); -var request = __webpack_require__(234); -var graphql = __webpack_require__(668); -var authToken = __webpack_require__(334); + // normalize spaces + range = range.split(/\s+/).join(' ') -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) - return obj; + return set } -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - if (enumerableOnly) symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - keys.push.apply(keys, symbols); +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') } - return keys; + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + }) + }) } -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? 
arguments[i] : {}; +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[TILDELOOSE] : re[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' } - } - return target; + debug('tilde return', ret) + return ret + }) } -const VERSION = "3.1.2"; - -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[CARETLOOSE] : re[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' 
+ (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' } - }; // prepend default user agent with `options.userAgent` if set + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + debug('caret return', ret) + return ret + }) +} - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' } - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' } - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { - baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") - })); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. 
Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. + debug('xRange return', ret) - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + return ret + }) +} - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const auth = options.authStrategy(Object.assign({ - request: this.request - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], '') +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + return (from + ' ' + to).trim() +} - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false } - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } - if (typeof defaults === "function") { - super(defaults(options)); - return; - } + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue } - }; - return OctokitWithDefaults; + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) - */ + return true +} - static plugin(...newPlugins) { - var _a; +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min } -Octokit.VERSION = VERSION; -Octokit.plugins = []; -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } -/***/ }), + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } -/***/ 440: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + if (minver && range.test(minver)) { + return minver + } -"use strict"; + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } -Object.defineProperty(exports, "__esModule", ({ value: true })); + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. -var isPlainObject = _interopDefault(__webpack_require__(840)); -var universalUserAgent = __webpack_require__(429); + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -function lowercaseKeys(object) { - if (!object) { - return {}; + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } } + return true +} - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null } -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) } -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates +exports.coerce = coerce +function coerce (version) { + if (version instanceof SemVer) { + return version + } + if (typeof version !== 'string') { + return null + } - options.headers = lowercaseKeys(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + var match = version.match(re[COERCE]) - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + if (match == null) { + return null } - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; + return parse(match[1] + + '.' + (match[2] || '0') + + '.' + (match[3] || '0')) } -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } +/***/ }), - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} +/***/ 299: +/***/ (function(__unusedmodule, exports) { -const urlVariableRegex = /\{[^}]+\}/g; +"use strict"; -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); +Object.defineProperty(exports, '__esModule', { value: true }); - if (!matches) { - return []; - } +const VERSION = "2.2.3"; - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. 
+ * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } - return part; - }).join(""); + response.data.total_count = totalCount; + return response; } -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + next() { + if (!url) { + return Promise.resolve({ + done: true + }); + } -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + return requestMethod({ + method, + url, + headers + }).then(normalizePaginatedListResponse).then(response => { + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: response + }; + }); + } - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } + }) + }; } -function isDefined(value) { - return value !== undefined && value !== null; -} +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); } -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } + let earlyExit = false; - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; + function done() { + earlyExit = true; + } - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } + results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); + if (earlyExit) { + return results; } - } - return result; + return gather(octokit, results, iterator, mapFn); + }); } -function parseUrl(template) { +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { return { - expand: expand.bind(null, template) + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) }; } +paginateRest.VERSION = VERSION; -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; +exports.paginateRest = paginateRest; +//# sourceMappingURL=index.js.map - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); +/***/ }), - if (operator && operator !== "+") { - var separator = ","; +/***/ 323: +/***/ (function(module) { - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequset) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} +"use strict"; -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} -const VERSION = "6.0.5"; +var isStream = module.exports = function (stream) { + return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; +}; -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. +isStream.writable = function (stream) { + return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; +}; -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } +isStream.readable = function (stream) { + return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; }; -const endpoint = withDefaults(null, DEFAULTS); +isStream.duplex = function (stream) { + return isStream.writable(stream) && isStream.readable(stream); +}; -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map +isStream.transform = function (stream) { + return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; +}; /***/ }), -/***/ 668: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); +/***/ 357: +/***/ (function(module) { -var request = __webpack_require__(234); -var universalUserAgent = __webpack_require__(429); +module.exports = require("assert"); -const VERSION = "4.5.4"; +/***/ }), -class GraphqlError extends Error { - constructor(request, response) { - const message = response.data.errors[0].message; - super(message); - Object.assign(this, response.data); - Object.assign(this, { - headers: response.headers - }); - this.name = "GraphqlError"; - this.request = request; // Maintains proper stack trace (only available on V8) +/***/ 363: +/***/ (function(module) { - /* istanbul ignore next */ +module.exports = register - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } +function register (state, name, method, options) { + if (typeof method !== 'function') { + throw new Error('method for before hook must be a function') } -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -function graphql(request, query, options) { - options = typeof query === "string" ? 
options = Object.assign({ - query - }, options) : options = query; - const requestOptions = Object.keys(options).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = options[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } + if (!options) { + options = {} + } - result.variables[key] = options[key]; - return result; - }, {}); - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options) + }, method)() + } - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; + return Promise.resolve() + .then(function () { + if (!state.registry[name]) { + return method(options) } - throw new GraphqlError(requestOptions, { - headers, - data: response.data - }); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); + return (state.registry[name]).reduce(function (method, registered) { + return registered.hook.bind(null, method, options) + }, method)() + }) } -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - /***/ }), -/***/ 193: -/***/ ((__unused_webpack_module, exports) => { +/***/ 374: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.3.2"; - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. 
- - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validateDistros = exports.execBashCommand = void 0; +const core = __importStar(__webpack_require__(470)); +const github = __importStar(__webpack_require__(469)); +const tr = __importStar(__webpack_require__(686)); +const io = __importStar(__webpack_require__(1)); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +const fs_1 = __importDefault(__webpack_require__(747)); +// All command line flags passed to curl when invoked as a command. +const curlFlagsArray = [ + // (HTTP) Fail silently (no output at all) on server errors. This is mostly done to better enable + // scripts etc to better deal with failed attempts. In normal cases when a HTTP server fails to + // deliver a document, it returns an HTML document stating so (which often also describes why and + // more). This flag will prevent curl from outputting that and return error 22. + // This method is not fail-safe and there are occasions where non-successful response codes will slip + // through, especially when authentication is involved (response codes 401 and 407). + "--fail", + // Silent or quiet mode. 
Don't show progress meter or error messages. Makes Curl mute. + "--silent", + // When used with -s it makes curl show an error message if it fails. + "--show-error", + // (HTTP/HTTPS) If the server reports that the requested page has moved to a different location + // (indicated with a Location: header and a 3XX response code), this option will make curl redo the + // request on the new place. If used together with -i, --include or -I, --head, headers from all + // requested pages will be shown. When authentication is used, curl only sends its credentials to the + // initial host. If a redirect takes curl to a different host, it won't be able to intercept the + // user+password. See also --location-trusted on how to change this. You can limit the amount of + // redirects to follow by using the --max-redirs option. + // + // When curl follows a redirect and the request is not a plain GET (for example POST or PUT), it will + // do the following request with a GET if the HTTP response was 301, 302, or 303. If the response + // code was any other 3xx code, curl will re-send the following request using the same unmodified + // method. + "--location", +]; +const validROS1Distros = ["kinetic", "lunar", "melodic", "noetic"]; +const validROS2Distros = ["dashing", "eloquent", "foxy", "rolling"]; +const targetROS1DistroInput = "target-ros1-distro"; +const targetROS2DistroInput = "target-ros2-distro"; +const isLinux = process.platform == "linux"; +const isWindows = process.platform == "win32"; +/** + * Convert local paths to URLs. + * + * The user can pass the VCS repo file either as a URL or a path. + * If it is a path, this function will convert it into a URL (file://...). + * If the file is already passed as an URL, this function does nothing. + * + * @param vcsRepoFileUrl path or URL to the repo file + * @returns URL to the repo file + */ +function resolveVcsRepoFileUrl(vcsRepoFileUrl) { + if (fs_1.default.existsSync(vcsRepoFileUrl)) { + return "file://" + path.resolve(vcsRepoFileUrl); + } + else { + return vcsRepoFileUrl; + } } - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - next() { - if (!url) { - return Promise.resolve({ - done: true - }); +/** + * Execute a command in bash and wrap the output in a log group. + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param commandPrefix optional string used to prefix the command to be executed. + * @param options optional exec options. See ExecOptions + * @param log_message log group title. 
+ * @returns Promise exit code + */ +function execBashCommand(commandLine, commandPrefix, options, log_message) { + return __awaiter(this, void 0, void 0, function* () { + commandPrefix = commandPrefix || ""; + const bashScript = `${commandPrefix}${commandLine}`; + const message = log_message || `Invoking "bash -c '${bashScript}'"`; + let toolRunnerCommandLine = ""; + let toolRunnerCommandLineArgs = []; + if (isWindows) { + const which_msbuild = yield io.which("msbuild"); + if (!which_msbuild) { + // TODO: mutate instead of copy + options = options || {}; + options.env = options.env || {}; + options.env.PATH = + (options.env.PATH + ";" || false) + + "%programfiles(x86)%Microsoft Visual Studio\\2019\\Enterprise\\MSBuild\\Current\\Bin"; + } + toolRunnerCommandLine = "C:\\Program Files\\Git\\bin\\bash.exe"; + toolRunnerCommandLineArgs = ["-c", bashScript]; + } - - return requestMethod({ - method, - url, - headers - }).then(normalizePaginatedListResponse).then(response => { - // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: response - }; + else { + toolRunnerCommandLine = "bash"; + toolRunnerCommandLineArgs = ["-c", bashScript]; + } + const runner = new tr.ToolRunner(toolRunnerCommandLine, toolRunnerCommandLineArgs, options); + return core.group(message, () => { + return runner.exec(); }); - } - - }) - }; } - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); + }); } - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; +exports.execBashCommand = execBashCommand; +// Determine whether all inputs name supported ROS distributions. +function validateDistros(ros1Distro, ros2Distro) { + if (!ros1Distro && !ros2Distro) { + core.setFailed(`Neither '${targetROS1DistroInput}' nor '${targetROS2DistroInput}' inputs were set; at least one is required.`); + return false; } - - let earlyExit = false; - - function done() { - earlyExit = true; + if (ros1Distro && validROS1Distros.indexOf(ros1Distro) <= -1) { + core.setFailed(`Input ${ros1Distro} was not a valid ROS 1 distribution for '${targetROS1DistroInput}'. Valid values: ${validROS1Distros}`); + return false; } - - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; + if (ros2Distro && validROS2Distros.indexOf(ros2Distro) <= -1) { + core.setFailed(`Input ${ros2Distro} was not a valid ROS 2 distribution for '${targetROS2DistroInput}'.
Valid values: ${validROS2Distros}`); + return false; } - - return gather(octokit, results, iterator, mapFn); - }); -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; + return true; } -paginateRest.VERSION = VERSION; - -exports.paginateRest = paginateRest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 44: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const Endpoints = { - actions: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET 
/repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { - mediaType: { - previews: ["corsair"] - } - }], - createFromManifest: ["POST 
/app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - createSuite: ["POST /repos/{owner}/{repo}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { - mediaType: { - previews: ["antiope"] - } - }], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { - mediaType: { - previews: ["antiope"] - } - }], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - rerequestSuite: ["POST 
/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { - mediaType: { - previews: ["antiope"] - } - }], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { - mediaType: { - previews: ["antiope"] - } - }], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }] - }, - codeScanning: { - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getConductCode: ["GET /codes_of_conduct/{key}", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }] - }, - emojis: { - get: ["GET /emojis"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }] - }, - issues: { - 
addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { - mediaType: { - previews: ["mockingbird"] - } - }], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } 
- }], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForAuthenticatedUser: ["GET /user/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForOrg: ["GET /orgs/{org}/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - 
setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - createCard: ["POST /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - createColumn: ["POST /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - createForAuthenticatedUser: ["POST /user/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForOrg: ["POST /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForRepo: ["POST /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - delete: ["DELETE /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteCard: ["DELETE /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteColumn: ["DELETE /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - get: ["GET /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getCard: ["GET /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getColumn: ["GET /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { - mediaType: { - previews: ["inertia"] - } - }], - listCards: ["GET /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - listCollaborators: ["GET /projects/{project_id}/collaborators", { - mediaType: { - previews: ["inertia"] - } - }], - listColumns: ["GET /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - listForOrg: ["GET /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForRepo: ["GET /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForUser: ["GET /users/{username}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - moveCard: ["POST /projects/columns/cards/{card_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - moveColumn: ["POST /projects/columns/{column_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - update: ["PATCH /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateCard: ["PATCH /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateColumn: ["PATCH /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - 
deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { - mediaType: { - previews: ["lydian"] - } - }], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussion: ["DELETE 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteLegacy: ["DELETE /reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }, { - deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" - }], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], - createRelease: ["POST 
/repos/{owner}/{repo}/releases"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { - mediaType: { - previews: ["baptiste"] - } - }], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", { - mediaType: { - previews: ["black-panther"] +exports.validateDistros = validateDistros; +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const repo = github.context.repo; + const workspace = process.env.GITHUB_WORKSPACE; + const colconMixinName = core.getInput("colcon-mixin-name"); + const colconMixinRepo = core.getInput("colcon-mixin-repository"); + const extraCmakeArgs = core.getInput("extra-cmake-args"); + const colconExtraArgs = core.getInput("colcon-extra-args"); + const importToken = core.getInput("import-token"); + const packageName = core.getInput("package-name", { required: true }); + const packageNameList = packageName.split(RegExp("\\s")); + const rosWorkspaceName = "ros_ws"; + core.setOutput("ros-workspace-directory-name", rosWorkspaceName); + const rosWorkspaceDir = path.join(workspace, rosWorkspaceName); + const targetRos1Distro = core.getInput(targetROS1DistroInput); + const targetRos2Distro = core.getInput(targetROS2DistroInput); + const vcsRepoFileUrlListAsString = core.getInput("vcs-repo-file-url") || ""; + const vcsRepoFileUrlList = vcsRepoFileUrlListAsString.split(RegExp("\\s")); + const vcsRepoFileUrlListNonEmpty = vcsRepoFileUrlList.filter((x) => x != ""); + const vcsRepoFileUrlListResolved = vcsRepoFileUrlListNonEmpty.map((x) => resolveVcsRepoFileUrl(x)); + const coverageIgnorePattern = core.getInput("coverage-ignore-pattern"); + if (!validateDistros(targetRos1Distro, targetRos2Distro)) { + return; + } + // rosdep does not reliably work on Windows, see + // ros-infrastructure/rosdep#610 for instance. So, we do not run it. + if (!isWindows) { + yield execBashCommand("rosdep update"); + } + // Reset colcon configuration. + yield io.rmRF(path.join(os.homedir(), ".colcon")); + // Wipe out the workspace directory to ensure the workspace is always + // identical. + yield io.rmRF(rosWorkspaceDir); + // Check out ROS 2 from source and install ROS 2 system dependencies. + yield io.mkdirP(rosWorkspaceDir + "/src"); + const options = { + cwd: rosWorkspaceDir, + }; + const curlFlags = curlFlagsArray.join(" "); + for (const vcsRepoFileUrl of vcsRepoFileUrlListResolved) { + yield execBashCommand(`curl ${curlFlags} '${vcsRepoFileUrl}' | vcs import --force --recursive src/`, undefined, options); + } + // If the package under test is part of ros.repos, remove it first. + // We do not want to allow the "default" head state of the package to + // be present in the workspace, otherwise colcon will fail, stating that it + // found two packages with an identical name. + const posixRosWorkspaceDir = isWindows + ? rosWorkspaceDir.replace(/\\/g, "/") + : rosWorkspaceDir; + yield execBashCommand(`find "${posixRosWorkspaceDir}" -type d -and -name "${repo["repo"]}" | xargs rm -rf`); + // The repo file for the repository needs to be generated on-the-fly to + // incorporate the custom repository URL and branch name, when a PR is + // being built.
+ let repoFullName = process.env.GITHUB_REPOSITORY; + if (github.context.payload.pull_request) { + repoFullName = github.context.payload.pull_request.head.repo.full_name; + } + let tokenAuth = importToken; + if (tokenAuth !== "") { + tokenAuth = tokenAuth.concat("@"); + } + const headRef = process.env.GITHUB_HEAD_REF; + const commitRef = headRef || github.context.sha; + const repoFilePath = path.join(rosWorkspaceDir, "package.repo"); + const repoFileContent = `repositories: + ${repo["repo"]}: + type: git + url: 'https://${tokenAuth}github.com/${repoFullName}.git' + version: '${commitRef}'`; + fs_1.default.writeFileSync(repoFilePath, repoFileContent); + yield execBashCommand("vcs import --force --recursive src/ < package.repo", undefined, options); + // Remove all repositories the package under test does not depend on, to + // avoid having rosdep installing unrequired dependencies. + yield execBashCommand(`diff --new-line-format="" --unchanged-line-format="" <(colcon list -p) <(colcon list --packages-up-to ${packageNameList.join(" ")} -p) | xargs rm -rf`, undefined, options); + // Install ROS dependencies for each distribution being sourced + if (targetRos1Distro) { + yield execBashCommand(`DEBIAN_FRONTEND=noninteractive RTI_NC_LICENSE_ACCEPTED=yes rosdep install -r --from-paths src --ignore-src --rosdistro ${targetRos1Distro} -y || true`, undefined, options); + } + if (targetRos2Distro) { + yield execBashCommand(`DEBIAN_FRONTEND=noninteractive RTI_NC_LICENSE_ACCEPTED=yes rosdep install -r --from-paths src --ignore-src --rosdistro ${targetRos2Distro} -y || true`, undefined, options); + } + if (colconMixinName !== "" && colconMixinRepo !== "") { + yield execBashCommand(`colcon mixin add default '${colconMixinRepo}'`); + yield execBashCommand("colcon mixin update default"); + } + let extra_options = []; + if (colconMixinName !== "") { + extra_options = extra_options.concat(["--mixin", colconMixinName]); + } + if (colconExtraArgs !== "") { + extra_options = extra_options.concat(colconExtraArgs); + } + // Add the future install bin directory to PATH. + // This enables cmake find_package to find packages installed in the + // colcon install directory, even if local_setup.sh has not been sourced. + // + // From the find_package doc: + // https://cmake.org/cmake/help/latest/command/find_package.html + // 5. Search the standard system environment variables. + // Path entries ending in /bin or /sbin are automatically converted to + // their parent directories: + // PATH + // + // ament_cmake should handle this automatically, but we are seeing cases + // where this does not happen. See issue #26 for relevant CI logs. 
+ core.addPath(path.join(rosWorkspaceDir, "install", "bin")); + // Source any installed ROS distributions if they are present + let colconCommandPrefix = ""; + if (isLinux) { + if (targetRos1Distro) { + const ros1SetupPath = `/opt/ros/${targetRos1Distro}/setup.sh`; + if (fs_1.default.existsSync(ros1SetupPath)) { + colconCommandPrefix += `source ${ros1SetupPath} && `; + } + } + if (targetRos2Distro) { + const ros2SetupPath = `/opt/ros/${targetRos2Distro}/setup.sh`; + if (fs_1.default.existsSync(ros2SetupPath)) { + colconCommandPrefix += `source ${ros2SetupPath} && `; + } + } + } + else if (isWindows) { + // Windows only supports ROS2 + if (targetRos2Distro) { + const ros2SetupPath = `c:/dev/${targetRos2Distro}/ros2-windows/setup.bat`; + if (fs_1.default.existsSync(ros2SetupPath)) { + colconCommandPrefix += `${ros2SetupPath} && `; + } + } + } + let colconBuildCmd = [ + `colcon build`, + `--event-handlers console_cohesion+`, + `--packages-up-to ${packageNameList.join(" ")}`, + `${extra_options.join(" ")}`, + `--cmake-args ${extraCmakeArgs}`, + ].join(" "); + if (!isWindows) { + colconBuildCmd = colconBuildCmd.concat(" --symlink-install"); + } + yield execBashCommand(colconBuildCmd, colconCommandPrefix, options); + // ignoreReturnCode is set to true to avoid having a lack of coverage + // data fail the build. + const colconLcovInitialCmd = "colcon lcov-result --initial"; + yield execBashCommand(colconLcovInitialCmd, colconCommandPrefix, { + cwd: rosWorkspaceDir, + ignoreReturnCode: true, + }); + const colconTestCmd = [ + `colcon test`, + `--event-handlers console_cohesion+`, + `--pytest-with-coverage`, + `--return-code-on-test-failure`, + `--packages-select ${packageNameList.join(" ")}`, + `${extra_options.join(" ")}`, + ].join(" "); + yield execBashCommand(colconTestCmd, colconCommandPrefix, options); + // ignoreReturnCode, check comment above in --initial + const colconLcovResultCmd = [ + `colcon lcov-result`, + `--filter ${coverageIgnorePattern}`, + `--packages-select ${packageNameList.join(" ")}`, + ].join(" "); + yield execBashCommand(colconLcovResultCmd, colconCommandPrefix, { + cwd: rosWorkspaceDir, + ignoreReturnCode: true, + }); + const colconCoveragepyResultCmd = [ + `colcon coveragepy-result`, + `--packages-select ${packageNameList.join(" ")}`, + ].join(" "); + yield execBashCommand(colconCoveragepyResultCmd, colconCommandPrefix, options); + core.setOutput("ros-workspace-directory-name", rosWorkspaceName); + } + catch (error) { + core.setFailed(error.message); + } + }); +} +run(); + + +/***/ }), + +/***/ 385: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var isPlainObject = _interopDefault(__webpack_require__(696)); +var universalUserAgent = __webpack_require__(796); + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates + + + options.headers = lowercaseKeys(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + + if (names.length === 0) { + return url; + } + + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +const urlVariableRegex = /\{[^}]+\}/g; + +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} + +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + + if (!matches) { + return []; + } + + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. 
+// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + + return part; + }).join(""); +} + +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} + +function isDefined(value) { + return value !== undefined && value !== null; +} + +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} + +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); } - }], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - 
getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { - mediaType: { - previews: ["groot"] + + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } } - }], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { - mediaType: { - previews: ["groot"] + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); } - }], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - 
removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + + return result; +} + +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} + +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); } - }], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits", { - mediaType: { - previews: ["cloak"] + + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + + if (operator && operator !== "+") { + var separator = ","; + + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); } - }], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics", { - mediaType: { - previews: ["mercy"] + } else { + return encodeReserved(literal); + } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); + + if (!isBinaryRequset) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } + + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; } - }], - users: ["GET /search/users"] + } + } // default content-type for JSON if body is set + + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + + + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present + + + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? 
{ + request: options.request + } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} + +const VERSION = "6.0.3"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. + +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + format: "", + previews: [] + } +}; + +const endpoint = withDefaults(null, DEFAULTS); + +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 389: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const fs = __webpack_require__(747); +const shebangCommand = __webpack_require__(866); + +function readShebang(command) { + // Read the first 150 bytes from the file + const size = 150; + let buffer; + + if (Buffer.alloc) { + // Node.js v4.5+ / v5.10+ + buffer = Buffer.alloc(size); + } else { + // Old Node.js API + buffer = new Buffer(size); + buffer.fill(0); // zero-fill + } + + let fd; + + try { + fd = fs.openSync(command, 'r'); + fs.readSync(fd, buffer, 0, size, 0); + fs.closeSync(fd); + } catch (e) { /* Empty */ } + + // Attempt to extract shebang (null is returned if not a shebang) + return shebangCommand(buffer.toString()); +} + +module.exports = readShebang; + + +/***/ }), + +/***/ 413: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(141); + + +/***/ }), + +/***/ 427: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +// Older verions of Node.js might not have `util.getSystemErrorName()`. +// In that case, fall back to a deprecated internal. 
+const util = __webpack_require__(669); + +let uv; + +if (typeof util.getSystemErrorName === 'function') { + module.exports = util.getSystemErrorName; +} else { + try { + uv = process.binding('uv'); + + if (typeof uv.errname !== 'function') { + throw new TypeError('uv.errname is not a function'); + } + } catch (err) { + console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); + uv = null; + } + + module.exports = code => errname(uv, code); +} + +// Used for testing the fallback behavior +module.exports.__test__ = errname; + +function errname(uv, code) { + if (uv) { + return uv.errname(code); + } + + if (!(code < 0)) { + throw new Error('err >= 0'); + } + + return `Unknown system error ${code}`; +} + + + +/***/ }), + +/***/ 431: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __importStar(__webpack_require__(87)); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +function escapeData(s) { + return toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 448: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +var universalUserAgent = __webpack_require__(796); +var beforeAfterHook = __webpack_require__(523); +var request = __webpack_require__(753); +var graphql = 
__webpack_require__(898); +var authToken = __webpack_require__(813); + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + if (enumerableOnly) symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +const VERSION = "3.1.0"; + +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + hook: hook.bind(null, "request") + }), mediaType: { - previews: ["inertia"] + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set + + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { + baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") + })); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. 
+ + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; } - }], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] + } else { + const auth = options.authStrategy(Object.assign({ + request: this.request + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } + + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); } - }], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { - mediaType: { - previews: ["inertia"] + + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + + + static plugin(...newPlugins) { + var _a; + + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } + +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 453: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var once = __webpack_require__(969) +var eos = __webpack_require__(9) +var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes + +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump + + +/***/ }), + +/***/ 454: +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var Stream = _interopDefault(__webpack_require__(794)); +var http = _interopDefault(__webpack_require__(605)); +var Url = _interopDefault(__webpack_require__(835)); +var https = _interopDefault(__webpack_require__(211)); +var zlib = _interopDefault(__webpack_require__(761)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type 
= type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = __webpack_require__(18).convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout + }; + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; + + +/***/ }), + +/***/ 462: +/***/ (function(module) { + +"use strict"; + + +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; + +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + return arg; +} + +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; + + // Algorithm below is based on https://qntm.org/cmd + + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(\\*)"/g, '$1$1\\"'); + + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(\\*)$/, '$1$1'); + + // All other backslashes occur literally + + // Quote the whole thing: + arg = `"${arg}"`; + + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) { + arg = arg.replace(metaCharsRegExp, '^$1'); + } + + return arg; +} + +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; + + +/***/ }), + +/***/ 463: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var deprecation = __webpack_require__(692); +var once = _interopDefault(__webpack_require__(969)); + +const logOnce = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = "HttpError"; + this.status = statusCode; + Object.defineProperty(this, "code", { + get() { + logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; } - }], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] + + }); + this.headers = options.headers || {}; // redact request credentials without mutating original request options + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: 
options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 469: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__webpack_require__(262)); +const utils_1 = __webpack_require__(521); +exports.context = new Context.Context(); +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokit(token, options) { + return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map + +/***/ }), + +/***/ 470: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; }; +Object.defineProperty(exports, "__esModule", { value: true }); +const command_1 = __webpack_require__(431); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = command_1.toCommandValue(val); + process.env[name] = convertedVal; + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + command_1.issueCommand('add-path', {}, inputPath); + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. The value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 
'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + */ +function error(message) { + command_1.issue('error', message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds an warning issue + * @param message warning issue message. Errors will be converted to string via toString() + */ +function warning(message) { + command_1.issue('warning', message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. 
+ * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map -const VERSION = "4.1.4"; +/***/ }), -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; +/***/ 489: +/***/ (function(module, __unusedexports, __webpack_require__) { - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); +"use strict"; - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - const scopeMethods = newMethods[scope]; +const path = __webpack_require__(622); +const which = __webpack_require__(814); +const pathKey = __webpack_require__(39)(); - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } +function resolveCommandAttempt(parsed, withoutPathExt) { + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + // If a custom `cwd` was specified, we need to change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (hasCustomCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } } - } - return newMethods; + let resolved; + + try { + resolved = which.sync(parsed.command, { + path: (parsed.options.env || process.env)[pathKey], + pathExt: withoutPathExt ? path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + process.chdir(cwd); + } + + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? 
parsed.options.cwd : '', resolved); + } + + return resolved; } -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ +function resolveCommand(parsed) { + return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +} - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` +module.exports = resolveCommand; - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); + +/***/ }), + +/***/ 510: +/***/ (function(module) { + +module.exports = addHook + +function addHook (state, kind, name, hook) { + var orig = hook + if (!state.registry[name]) { + state.registry[name] = [] + } + + if (kind === 'before') { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)) } + } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + if (kind === 'after') { + hook = function (method, options) { + var result + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_ + return orig(result, options) + }) + .then(function () { + return result + }) + } + } + + if (kind === 'error') { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options) + }) + } + } + + state.registry[name].push({ + hook: hook, + orig: orig + }) +} + + +/***/ }), + +/***/ 521: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOctokitOptions = exports.GitHub = exports.context = void 0; +const Context = __importStar(__webpack_require__(262)); +const Utils = __importStar(__webpack_require__(127)); +// octokit + plugins +const core_1 = __webpack_require__(448); +const plugin_rest_endpoint_methods_1 = __webpack_require__(842); +const plugin_paginate_rest_1 = __webpack_require__(299); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +const defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } +/***/ }), - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); +/***/ 523: +/***/ (function(module, __unusedexports, __webpack_require__) { - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); +var register = __webpack_require__(363) +var addHook = __webpack_require__(510) +var removeHook = __webpack_require__(763) - if (!(alias in options)) { - options[alias] = options[name]; - } +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind +var bindable = bind.bind(bind) - delete options[name]; - } - } +function bindApi (hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) + hook.api = { remove: removeHookRef } + hook.remove = removeHookRef - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { + var args = name ? 
[state, kind, name] : [state, kind] + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) + }) +} +function HookSingular () { + var singularHookName = 'h' + var singularHookState = { + registry: {} + } + var singularHook = register.bind(null, singularHookState, singularHookName) + bindApi(singularHook, singularHookState, singularHookName) + return singularHook +} - return requestWithDefaults(...args); +function HookCollection () { + var state = { + registry: {} } - return Object.assign(withDecorations, requestWithDefaults); -} + var hook = register.bind(null, state) + bindApi(hook, state) -/** - * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. - * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 - */ + return hook +} -function restEndpointMethods(octokit) { - return endpointsToMethods(octokit, Endpoints); +var collectionHookDeprecationMessageDisplayed = false +function Hook () { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') + collectionHookDeprecationMessageDisplayed = true + } + return HookCollection() } -restEndpointMethods.VERSION = VERSION; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map +Hook.Singular = HookSingular.bind() +Hook.Collection = HookCollection.bind() + +module.exports = Hook +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook +module.exports.Singular = Hook.Singular +module.exports.Collection = Hook.Collection /***/ }), -/***/ 537: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ 539: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __webpack_require__(932); -var once = _interopDefault(__webpack_require__(223)); - -const logOnce = once(deprecation => console.warn(deprecation)); +Object.defineProperty(exports, "__esModule", { value: true }); +const url = __webpack_require__(835); +const http = __webpack_require__(605); +const https = __webpack_require__(211); +const pm = __webpack_require__(950); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); /** - * Error with extra properties to help with debugging + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = url.parse(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); } - - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 234: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var endpoint = __webpack_require__(440); -var universalUserAgent = __webpack_require__(429); -var isPlainObject = _interopDefault(__webpack_require__(840)); -var nodeFetch = _interopDefault(__webpack_require__(467)); -var requestError = __webpack_require__(537); - -const VERSION = "5.4.7"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - - throw new requestError.RequestError(response.statusText, status, { - headers, - request: requestOptions - }); + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); } - - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - headers, - request: requestOptions - }); + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + let parsedUrl = url.parse(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = url.parse(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); + } + } + return response; } - - if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { - headers, - request: requestOptions + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); }); - - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format - - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); } - - throw error; - }); - } - - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = url.parse(serverUrl); + return this._getAgent(parsedUrl); } - - return getBufferResponse(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; } - - throw new requestError.RequestError(error.message, 500, { - headers, - request: requestOptions - }); - }); -} - -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; - - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} - -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); - -exports.request = request; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 682: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - -var register = __webpack_require__(670) -var addHook = __webpack_require__(549) -var removeHook = __webpack_require__(819) - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? [state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} - -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} - -function HookCollection () { - var state = { - registry: {} - } - - var hook = register.bind(null, state) - bindApi(hook, state) - - return hook -} - -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} - -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() - -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection - - -/***/ }), - -/***/ 549: -/***/ ((module) => { - -module.exports = addHook - -function addHook (state, kind, name, hook) { - var orig = hook - if (!state.registry[name]) { - state.registry[name] = [] - } - - if (kind === 'before') { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)) + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; } - } - - if (kind === 'after') { - hook = function (method, options) { - var result - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_ - return orig(result, options) - }) - .then(function () { - return result - }) + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __webpack_require__(413); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + proxyAuth: proxyUrl.auth, + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; } - } - - if (kind === 'error') { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options) - }) + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); } - } - - state.registry[name].push({ - hook: hook, - orig: orig - }) -} - - -/***/ }), - -/***/ 670: -/***/ ((module) => { - -module.exports = register - -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') - } - - if (!options) { - options = {} - } - - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() - } - - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } - - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) -} - - -/***/ }), - -/***/ 819: -/***/ ((module) => { - -module.exports = removeHook - -function removeHook (state, name, method) { - if (!state.registry[name]) { - return - } - - var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) - - if (index === -1) { - return - } - - state.registry[name].splice(index, 1) -} - - -/***/ }), - -/***/ 932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new Error(msg); + // attach statusCode and body obj (if available) to the error object + err['statusCode'] = statusCode; + if (response.result) { + err['result'] = response.result; + } + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 840: -/***/ ((module) => { - -"use strict"; - - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -module.exports = isPlainObject; - - -/***/ }), - -/***/ 467: -/***/ ((module, exports, __webpack_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var Stream = _interopDefault(__webpack_require__(413)); -var http = _interopDefault(__webpack_require__(605)); -var Url = _interopDefault(__webpack_require__(835)); -var https = _interopDefault(__webpack_require__(211)); -var zlib = _interopDefault(__webpack_require__(761)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } +/***/ }), - this[BUFFER] = Buffer.concat(buffers); +/***/ 568: +/***/ (function(module, __unusedexports, __webpack_require__) { - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; +"use strict"; - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} +const path = __webpack_require__(622); +const niceTry = __webpack_require__(948); +const resolveCommand = __webpack_require__(489); +const escape = __webpack_require__(462); +const readShebang = __webpack_require__(389); +const semver = __webpack_require__(280); -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); +// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 +const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ +function detectShebang(parsed) { + parsed.file = resolveCommand(parsed); -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); + const shebang = parsed.file && readShebang(parsed.file); - this.message = message; - this.type = type; + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } + return resolveCommand(parsed); + } - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); + return parsed.file; } -FetchError.prototype = 
Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; +function parseNonShell(parsed) { + if (!isWin) { + return parsed; + } -let convert; -try { - convert = __webpack_require__(877).convert; -} catch (e) {} + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); -const INTERNALS = Symbol('Body internals'); + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; + // If a shell is required, use cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is an hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + const shellCommand = [parsed.command].concat(parsed.args).join(' '); - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } + return parsed; } -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, +function parseShell(parsed) { + // If node supports the shell option, there's no need to mimic its behavior + if (supportsShellOption) { + return parsed; + } - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, + // Mimic node shell option + // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 + const shellCommand = [parsed.command].concat(parsed.args).join(' '); - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; + if (isWin) { + parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } else { + if (typeof parsed.options.shell === 'string') { + parsed.command = parsed.options.shell; + } else if (process.platform === 'android') { + parsed.command = '/system/bin/sh'; + } else { + parsed.command = '/bin/sh'; + } - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, + parsed.args = ['-c', shellCommand]; + } - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, + return parsed; +} - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; + args = args ? args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original + + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; + // Delegate further parsing to shell or non-shell + return options.shell ? parseShell(parsed) : parseNonShell(parsed); +} -// In browsers, all properties are enumerable. 
-Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); +module.exports = parse; -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; -/** - * Consume and convert an entire Body to a Buffer. - * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; +/***/ }), - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } +/***/ 605: +/***/ (function(module) { - this[INTERNALS].disturbed = true; +module.exports = require("http"); - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } +/***/ }), - let body = this.body; +/***/ 614: +/***/ (function(module) { - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +module.exports = require("events"); - // body is blob - if (isBlob(body)) { - body = body.stream(); - } +/***/ }), - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } +/***/ 621: +/***/ (function(module, __unusedexports, __webpack_require__) { - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); +"use strict"; + +const path = __webpack_require__(622); +const pathKey = __webpack_require__(39); + +module.exports = opts => { + opts = Object.assign({ + cwd: process.cwd(), + path: process.env[pathKey()] + }, opts); + + let prev; + let pth = path.resolve(opts.cwd); + const ret = []; + + while (prev !== pth) { + ret.push(path.join(pth, 'node_modules/.bin')); + prev = pth; + pth = path.resolve(pth, '..'); } - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; + // ensure the running `node` binary is used + ret.push(path.dirname(process.execPath)); - return new Body.Promise(function (resolve, reject) { - let resTimeout; + return ret.concat(opts.path).join(path.delimiter); +}; - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } +module.exports.env = opts => { + opts = Object.assign({ + env: process.env + }, opts); - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); + const env = Object.assign({}, opts.env); + const path = pathKey({env}); - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } + opts.path = env[path]; + env[path] = module.exports(opts); - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 
'max-size')); - return; - } + return env; +}; - accumBytes += chunk.length; - accum.push(chunk); - }); - body.on('end', function () { - if (abort) { - return; - } +/***/ }), - clearTimeout(resTimeout); +/***/ 622: +/***/ (function(module) { - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} +module.exports = require("path"); -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } +/***/ }), - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; +/***/ 631: +/***/ (function(module) { - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } +module.exports = require("net"); - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); +/***/ }), - // html5 - if (!res && str) { - res = /= maxDepth) + return exports.mkdir(fsPath); + try { + yield exports.mkdir(fsPath); + return; + } + catch (err) { + switch (err.code) { + case 'ENOENT': { + yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); + yield exports.mkdir(fsPath); + return; + } + default: { + let stats; + try { + stats = yield exports.stat(fsPath); + } + catch (err2) { + throw err; + } + if (!stats.isDirectory()) + throw err; + } + } + } + }); } - +exports.mkdirP = mkdirP; /** - * Check if `obj` is a W3C `Blob` object (which `File` inherits from) - * @param {*} obj - * @return {boolean} + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. 
*/ -function isBlob(obj) { - return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 686: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __importStar(__webpack_require__(87)); +const events = __importStar(__webpack_require__(614)); +const child = __importStar(__webpack_require__(129)); +const path = __importStar(__webpack_require__(622)); +const io = __importStar(__webpack_require__(1)); +const ioUtil = __importStar(__webpack_require__(672)); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + strBuffer = s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? 
a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. 
this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + const stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + const errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + }); + }); + } } - +exports.ToolRunner = ToolRunner; /** - * Clone body given Res/Req instance + * Convert an arg string to an array of args. Handles escaping * - * @param Mixed instance Response or Request instance - * @return Mixed + * @param argString string of arguments + * @returns string[] array of arguments */ -function clone(instance) { - let p1, p2; - let body = instance.body; - - // don't allow cloning a used body - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used'); - } - - // check that body is a stream and not form-data object - // note: we can't clone the form-data object without having it as a dependency - if (body instanceof Stream && typeof body.getBoundary !== 'function') { - // tee instance body - p1 = new PassThrough(); - p2 = new PassThrough(); - body.pipe(p1); - body.pipe(p2); - // set instance body to teed body and return the other teed body - instance[INTERNALS].body = p1; - body = p2; - } - - return body; +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; } - -/** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract - * - * This function assumes that instance.body is present. 
- * - * @param Mixed instance Any options.body input - */ -function extractContentType(body) { - if (body === null) { - // body is null - return null; - } else if (typeof body === 'string') { - // body is string - return 'text/plain;charset=UTF-8'; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } else if (isBlob(body)) { - // body is blob - return body.type || null; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return null; - } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - return null; - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - return null; - } else if (typeof body.getBoundary === 'function') { - // detect form data input from form-data module - return `multipart/form-data;boundary=${body.getBoundary()}`; - } else if (body instanceof Stream) { - // body is stream - // can't really do much about this - return null; - } else { - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; - } +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } } +//# sourceMappingURL=toolrunner.js.map -/** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. 
- * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes - * - * @param Body instance Instance of Body - * @return Number? Number of bytes, or null if not possible - */ -function getTotalBytes(instance) { - const body = instance.body; +/***/ }), +/***/ 692: +/***/ (function(__unusedmodule, exports) { - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } -} +"use strict"; -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. - * - * @param Body instance Instance of Body - * @return Void - */ -function writeToStream(dest, instance) { - const body = instance.body; +Object.defineProperty(exports, '__esModule', { value: true }); - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); - } -} +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) -// expose Promise -Body.Promise = global.Promise; + /* istanbul ignore next */ -/** - * headers.js - * - * Headers class offers convenient helpers - */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; + this.name = 'Deprecation'; + } -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } } -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} +exports.Deprecation = Deprecation; -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. - * - * @param String name Header name - * @return String|Undefined - */ -function find(map, name) { - name = name.toLowerCase(); - for (const key in map) { - if (key.toLowerCase() === name) { - return key; - } - } - return undefined; -} -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; +/***/ }), - this[MAP] = Object.create(null); +/***/ 696: +/***/ (function(module) { - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); +"use strict"; - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - return; - } +/*! + * isobject + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ - // We don't worry about converting prop to ByteString here as append() - // will handle it. 
- if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } +function isObject(val) { + return val != null && typeof val === 'object' && Array.isArray(val) === false; +} - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } +function isObjectObject(o) { + return isObject(o) === true + && Object.prototype.toString.call(o) === '[object Object]'; +} - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } +function isPlainObject(o) { + var ctor,prot; - return this[MAP][key].join(', '); - } + if (isObjectObject(o) === false) return false; - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + // If has modified constructor + ctor = o.constructor; + if (typeof ctor !== 'function') return false; + + // If has modified prototype + prot = ctor.prototype; + if (isObjectObject(prot) === false) return false; - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } + // Most likely a plain Object + return true; +} - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } +module.exports = isPlainObject; - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } +/***/ }), - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } +/***/ 697: +/***/ (function(module) { - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } +"use strict"; - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } +module.exports = (promise, onFinally) => { + onFinally = onFinally || (() => {}); + + return promise.then( + val => new Promise(resolve => { + resolve(onFinally()); + }).then(() => val), + err => new Promise(resolve => { + resolve(onFinally()); + }).then(() => { + throw err; + }) + ); +}; - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } +/***/ }), + +/***/ 742: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var fs = __webpack_require__(747) +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = __webpack_require__(818) +} else { + core = __webpack_require__(197) } -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); +module.exports = isexe +isexe.sync = sync -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } -const INTERNAL = Symbol('internal'); + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } } -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; +/***/ }), - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } +/***/ 747: +/***/ (function(module) { - this[INTERNAL].index = index + 1; +module.exports = require("fs"); - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); +/***/ }), -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); +/***/ 753: +/***/ (function(__unusedmodule, exports, __webpack_require__) { -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); +"use strict"; - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - return obj; -} +Object.defineProperty(exports, '__esModule', { value: true }); -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var endpoint = __webpack_require__(385); +var universalUserAgent = __webpack_require__(796); +var isPlainObject = _interopDefault(__webpack_require__(696)); +var nodeFetch = _interopDefault(__webpack_require__(454)); +var requestError = __webpack_require__(463); + +const VERSION = "5.4.5"; + +function getBufferResponse(response) { + return response.arrayBuffer(); } -const INTERNALS$1 = Symbol('Response internals'); +function fetchWrapper(requestOptions) { + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } - Body.call(this, body, opts); + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests - const status = opts.status || 200; - const headers = new Headers(opts.headers); - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } - get url() { - return this[INTERNALS$1].url || ''; - } + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); + } - get status() { - return this[INTERNALS$1].status; - } + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions + }); - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array format - get redirected() { - return this[INTERNALS$1].counter > 0; - } + error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 + } - get statusText() { - return this[INTERNALS$1].statusText; - } + throw error; + }); + } - get headers() { - return this[INTERNALS$1].headers; - } + const contentType = response.headers.get("content-type"); - /** - * Clone this response - * - * 
@return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} + if (/application\/json/.test(contentType)) { + return response.json(); + } -Body.mixIn(Response.prototype); + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; + } -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} -const INTERNALS$2 = Symbol('Request internals'); +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); - let parsedURL; +exports.request = request; +//# sourceMappingURL=index.js.map - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); +/***/ }), - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } +/***/ 761: +/***/ (function(module) { + +module.exports = require("zlib"); + +/***/ }), + +/***/ 763: +/***/ (function(module) { + +module.exports = removeHook + +function removeHook (state, name, method) { + if (!state.registry[name]) { + return + } - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + var index = state.registry[name] + .map(function (registered) { return registered.orig }) + .indexOf(method) - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); + if (index === -1) { + return + } - const headers = new Headers(init.headers || input.headers || {}); + state.registry[name].splice(index, 1) +} - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; +/***/ }), - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } +/***/ 768: +/***/ (function(module) { - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; +"use strict"; - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } +module.exports = function (x) { + var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); + var cr = typeof x === 'string' ? 
'\r' : '\r'.charCodeAt(); - get method() { - return this[INTERNALS$2].method; + if (x[x.length - 1] === lf) { + x = x.slice(0, x.length - 1); } - get url() { - return format_url(this[INTERNALS$2].parsedURL); + if (x[x.length - 1] === cr) { + x = x.slice(0, x.length - 1); } - get headers() { - return this[INTERNALS$2].headers; - } + return x; +}; - get redirect() { - return this[INTERNALS$2].redirect; - } - get signal() { - return this[INTERNALS$2].signal; - } +/***/ }), - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} +/***/ 794: +/***/ (function(module) { -Body.mixIn(Request.prototype); +module.exports = require("stream"); -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); +/***/ }), -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); +/***/ 796: +/***/ (function(__unusedmodule, exports, __webpack_require__) { -/** - * Convert a Request to Node.js http request options. - * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); +"use strict"; - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } +Object.defineProperty(exports, '__esModule', { value: true }); - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } +var osName = _interopDefault(__webpack_require__(2)); - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } +function getUserAgent() { + try { + return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; + } catch (error) { + if (/wmic os get Caption/.test(error.message)) { + return "Windows "; + } - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } + return ""; + } +} - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } +/***/ }), - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js +/***/ 813: +/***/ (function(__unusedmodule, exports) { - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} +"use strict"; -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ + +Object.defineProperty(exports, '__esModule', { value: true }); + +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? 
"installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} /** - * Create AbortError instance + * Prefix token for usage in the Authorization header * - * @param String message Error message for human - * @return AbortError + * @param token OAuth token or JSON Web Token */ -function AbortError(message) { - Error.call(this, message); +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } - this.type = 'aborted'; - this.message = message; + return `token ${token}`; +} - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); } -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map - Body.Promise = fetch.Promise; - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); +/***/ }), - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; +/***/ 814: +/***/ (function(module, __unusedexports, __webpack_require__) { - let response = null; +module.exports = which +which.sync = whichSync - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; +var isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' - if (signal && signal.aborted) { - abort(); - return; - } +var path = __webpack_require__(622) +var COLON = isWindows ? 
';' : ':' +var isexe = __webpack_require__(742) - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; +function getNotFoundError (cmd) { + var er = new Error('not found: ' + cmd) + er.code = 'ENOENT' + + return er +} + +function getPathInfo (cmd, opt) { + var colon = opt.colon || COLON + var pathEnv = opt.path || process.env.PATH || '' + var pathExt = [''] + + pathEnv = pathEnv.split(colon) + + var pathExtExe = '' + if (isWindows) { + pathEnv.unshift(process.cwd()) + pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') + pathExt = pathExtExe.split(colon) + + + // Always test the cmd itself first. isexe will check to make sure + // it's found in the pathExt set. + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') + } + + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) + pathEnv = [''] - // send request - const req = send(options); - let reqTimeout; + return { + env: pathEnv, + ext: pathExt, + extExe: pathExtExe + } +} - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } +function which (cmd, opt, cb) { + if (typeof opt === 'function') { + cb = opt + opt = {} + } - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] + + ;(function F (i, l) { + if (i === l) { + if (opt.all && found.length) + return cb(null, found) + else + return cb(getNotFoundError(cmd)) + } - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); + var p = path.join(pathPart, cmd) + if (!pathPart && (/^\.[\\\/]/).test(cmd)) { + p = cmd.slice(0, 2) + p + } + ;(function E (ii, ll) { + if (ii === ll) return F(i + 1, l) + var ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return cb(null, p + ext) + } + return E(ii + 1, ll) + }) + })(0, pathExt.length) + })(0, pathEnv.length) +} - req.on('response', function (res) { - clearTimeout(reqTimeout); +function whichSync (cmd, opt) { + opt = opt || {} - const headers = createHeadersLenient(res.headers); + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + for (var i = 0, l = pathEnv.length; i < l; i ++) { + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) - // HTTP fetch step 5.3 - const locationURL = location === null ? 
null : resolve_url(request.url, location); + var p = path.join(pathPart, cmd) + if (!pathPart && /^\.[\\\/]/.test(cmd)) { + p = cmd.slice(0, 2) + p + } + for (var j = 0, ll = pathExt.length; j < ll; j ++) { + var cur = p + pathExt[j] + var is + try { + is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } + } - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } + if (opt.all && found.length) + return found - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } + if (opt.nothrow) + return null - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; + throw getNotFoundError(cmd) +} - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } +/***/ }), - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } +/***/ 816: +/***/ (function(module) { - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); +"use strict"; - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; +module.exports = /^#!.*/; - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - // HTTP-network fetch step 12.1.1.4: handle content codings +/***/ }), - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. 
content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } +/***/ 818: +/***/ (function(module, __unusedexports, __webpack_require__) { - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; +module.exports = isexe +isexe.sync = sync - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } +var fs = __webpack_require__(747) - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } + if (!pathext) { + return true + } - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true + } + } + return false +} - writeToStream(req, request); - }); +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) } -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; -// expose Promise -fetch.Promise = global.Promise; +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? 
false : checkStat(stat, path, options)) + }) +} -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} /***/ }), -/***/ 223: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +/***/ 835: +/***/ (function(module) { -var wrappy = __webpack_require__(940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +module.exports = require("url"); -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) +/***/ }), - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) +/***/ 842: +/***/ (function(__unusedmodule, exports) { -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} +"use strict"; -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) + +Object.defineProperty(exports, '__esModule', { value: true }); + +const Endpoints = { + actions: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET 
/repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT 
/repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + checkToken: ["POST /applications/{client_id}/token"], + createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { + mediaType: { + previews: ["machine-man"] + } + }], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app", { + mediaType: { + previews: ["machine-man"] + } + }], + getBySlug: ["GET /apps/{app_slug}", { + mediaType: { + previews: ["machine-man"] + } + }], + getInstallation: ["GET /app/installations/{installation_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + getOrgInstallation: ["GET /orgs/{org}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallations: ["GET /app/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallationsForAuthenticatedUser: ["GET /user/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + createSuite: ["POST /repos/{owner}/{repo}/check-suites", { + mediaType: { + previews: 
["antiope"] + } + }], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { + mediaType: { + previews: ["antiope"] + } + }], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { + mediaType: { + previews: ["antiope"] + } + }], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { + mediaType: { + previews: ["antiope"] + } + }], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { + mediaType: { + previews: ["antiope"] + } + }], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { + mediaType: { + previews: ["antiope"] + } + }], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }] + }, + codeScanning: { + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getConductCode: ["GET /codes_of_conduct/{key}", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }] + }, + emojis: { + get: ["GET /emojis"] + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET 
/gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }] + }, + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { + mediaType: { + previews: ["mockingbird"] + } + }], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + }, + licenses: 
{ + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" + } + }] + }, + meta: { + get: ["GET /meta"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForAuthenticatedUser: ["GET /user/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForOrg: ["GET /orgs/{org}/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForUser: ["GET /user/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET 
/orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + createCard: ["POST /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + createColumn: ["POST /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + createForAuthenticatedUser: ["POST /user/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForOrg: ["POST /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForRepo: ["POST /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + delete: ["DELETE /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteCard: ["DELETE /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteColumn: ["DELETE /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + get: ["GET /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getCard: ["GET /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getColumn: ["GET /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }], + listCards: ["GET /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + listCollaborators: ["GET /projects/{project_id}/collaborators", { + mediaType: { + previews: ["inertia"] + } + }], + listColumns: ["GET /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + listForOrg: ["GET /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForRepo: ["GET /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForUser: ["GET /users/{username}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + moveColumn: ["POST /projects/columns/{column_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + 
}], + update: ["PATCH /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateCard: ["PATCH /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateColumn: ["PATCH /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { + mediaType: { + previews: ["lydian"] + } + }], + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] + }, + rateLimit: { + get: ["GET /rate_limit"] + }, + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + 
mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }] + }, + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + mediaType: { + previews: ["baptiste"] + } + }], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + 
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + mediaType: { + previews: ["groot"] + } + }], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET 
/repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + mediaType: { + previews: ["groot"] + } + }], + listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] + 
}, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits", { + mediaType: { + previews: ["cloak"] + } + }], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: ["DELETE 
/user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: ["GET /user/blocks"], + listEmailsForAuthenticated: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - - -/***/ }), - -/***/ 294: -/***/ ((module, __unused_webpack_exports, __webpack_require__) => { +}; -module.exports = __webpack_require__(219); +const VERSION = "4.0.0"; +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; -/***/ }), + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); -/***/ 219: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + if (!newMethods[scope]) { + newMethods[scope] = {}; + } -"use strict"; + const scopeMethods = newMethods[scope]; + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } -var net = __webpack_require__(631); -var tls = __webpack_require__(16); -var http = __webpack_require__(605); -var https = __webpack_require__(211); -var events = __webpack_require__(614); -var assert = __webpack_require__(357); -var util = __webpack_require__(669); + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } + return newMethods; +} -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined + }); + 
return requestWithDefaults(options); + } -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; + if (!(alias in options)) { + options[alias] = options[name]; + } - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; + delete options[name]; + } } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); + return requestWithDefaults(...args); + } - function onFree() { - self.emit('free', socket, options); - } + return Object.assign(withDecorations, requestWithDefaults); +} - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; +/** + * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary + * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is + * done, we will remove the registerEndpoints methods and return the methods + * directly as with the other plugins. 
At that point we will also remove the + * legacy workarounds and deprecations. + * + * See the plan at + * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 + */ -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); +function restEndpointMethods(octokit) { + return endpointsToMethods(octokit, Endpoints); +} +restEndpointMethods.VERSION = VERSION; - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } +/***/ }), - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); - } +/***/ 866: +/***/ (function(module, __unusedexports, __webpack_require__) { - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); +"use strict"; - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } +var shebangRegex = __webpack_require__(816); - function onError(cause) { - connectReq.removeAllListeners(); +module.exports = function (str) { + var match = str.match(shebangRegex); - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; + if (!match) { + return null; + } -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); + var arr = match[0].replace(/#! 
?/, '').split(' '); + var bin = arr[0].split('/').pop(); + var arg = arr[1]; - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } + return (bin === 'env' ? + arg : + bin + (arg ? ' ' + arg : '') + ); }; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); +/***/ }), + +/***/ 881: +/***/ (function(module) { + +"use strict"; + + +const isWin = process.platform === 'win32'; + +function notFoundError(original, syscall) { + return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { + code: 'ENOENT', + errno: 'ENOENT', + syscall: `${syscall} ${original.command}`, + path: original.command, + spawnargs: original.args, + }); } +function hookChildProcess(cp, parsed) { + if (!isWin) { + return; + } -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress + const originalEmit = cp.emit; + + cp.emit = function (name, arg1) { + // If emitting "exit" event and exit code is 1, we need to check if + // the command exists and emit an "error" instead + // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 + if (name === 'exit') { + const err = verifyENOENT(arg1, parsed, 'spawn'); + + if (err) { + return originalEmit.call(cp, 'error', err); + } + } + + return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params }; - } - return host; // for v0.11 or later } -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } +function verifyENOENT(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawn'); } - } - return target; -} + return null; +} -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); +function verifyENOENTSync(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawnSync'); } - console.error.apply(console, args); - } -} else { - debug = function() {}; + + return null; } -exports.debug = debug; // for test + +module.exports = { + hookChildProcess, + verifyENOENT, + verifyENOENTSync, + notFoundError, +}; /***/ }), -/***/ 429: -/***/ ((__unused_webpack_module, exports) => { +/***/ 898: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); 
+Object.defineProperty(exports, '__esModule', { value: true }); -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } +var request = __webpack_require__(753); +var universalUserAgent = __webpack_require__(796); + +const VERSION = "4.5.1"; + +class GraphqlError extends Error { + constructor(request, response) { + const message = response.data.errors[0].message; + super(message); + Object.assign(this, response.data); + this.name = "GraphqlError"; + this.request = request; // Maintains proper stack trace (only available on V8) - if (typeof process === "object" && "version" in process) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } } - return ""; } -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +function graphql(request, query, options) { + options = typeof query === "string" ? options = Object.assign({ + query + }, options) : options = query; + const requestOptions = Object.keys(options).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = options[key]; + return result; + } -/***/ }), + if (!result.variables) { + result.variables = {}; + } -/***/ 940: -/***/ ((module) => { + result.variables[key] = options[key]; + return result; + }, {}); + return request(requestOptions).then(response => { + if (response.data.errors) { + throw new GraphqlError(requestOptions, { + data: response.data + }); + } -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. 
-module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) + return response.data.data; + }); +} - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; - return wrapper + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) - } - return ret - } +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); } +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map + /***/ }), -/***/ 215: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +/***/ 948: +/***/ (function(module) { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validateDistros = exports.execBashCommand = void 0; -const core = __importStar(__webpack_require__(186)); -const github = __importStar(__webpack_require__(438)); -const tr = __importStar(__webpack_require__(159)); -const io = __importStar(__webpack_require__(436)); -const os = __importStar(__webpack_require__(87)); -const path = __importStar(__webpack_require__(622)); -const fs_1 = __importDefault(__webpack_require__(747)); -// All command line flags passed to curl when invoked as a command. -const curlFlagsArray = [ - // (HTTP) Fail silently (no output at all) on server errors. This is mostly done to better enable - // scripts etc to better deal with failed attempts. In normal cases when a HTTP server fails to - // deliver a document, it returns an HTML document stating so (which often also describes why and - // more). This flag will prevent curl from outputting that and return error 22. - // This method is not fail-safe and there are occasions where non-successful response codes will slip - // through, especially when authentication is involved (response codes 401 and 407). - "--fail", - // Silent or quiet mode. Don't show progress meter or error messages. Makes Curl mute. - "--silent", - // When used with -s it makes curl show an error message if it fails. - "--show-error", - // (HTTP/HTTPS) If the server reports that the requested page has moved to a different location - // (indicated with a Location: header and a 3XX response code), this option will make curl redo the - // request on the new place. If used together with -i, --include or -I, --head, headers from all - // requested pages will be shown. When authentication is used, curl only sends its credentials to the - // initial host. If a redirect takes curl to a different host, it won't be able to intercept the - // user+password. See also --location-trusted on how to change this. You can limit the amount of - // redirects to follow by using the --max-redirs option. - // - // When curl follows a redirect and the request is not a plain GET (for example POST or PUT), it will - // do the following request with a GET if the HTTP response was 301, 302, or 303. If the response - // code was any other 3xx code, curl will re-send the following request using the same unmodified - // method. - "--location", -]; -const validROS1Distros = ["kinetic", "lunar", "melodic", "noetic"]; -const validROS2Distros = ["dashing", "eloquent", "foxy", "rolling"]; -const targetROS1DistroInput = "target-ros1-distro"; -const targetROS2DistroInput = "target-ros2-distro"; -const isLinux = process.platform == "linux"; -const isWindows = process.platform == "win32"; + /** - * Convert local paths to URLs. - * - * The user can pass the VCS repo file either as a URL or a path. - * If it is a path, this function will convert it into a URL (file://...). - * If the file is already passed as an URL, this function does nothing. - * - * @param vcsRepoFileUrl path or URL to the repo file - * @returns URL to the repo file + * Tries to execute a function and discards any error that occurs. + * @param {Function} fn - Function that might or might not throw an error. + * @returns {?*} Return-value of the function when no error occurred. 
*/ -function resolveVcsRepoFileUrl(vcsRepoFileUrl) { - if (fs_1.default.existsSync(vcsRepoFileUrl)) { - return "file://" + path.resolve(vcsRepoFileUrl); +module.exports = function(fn) { + + try { return fn() } catch (e) {} + +} + +/***/ }), + +/***/ 950: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const url = __webpack_require__(835); +function getProxyUrl(reqUrl) { + let usingSsl = reqUrl.protocol === 'https:'; + let proxyUrl; + if (checkBypass(reqUrl)) { + return proxyUrl; + } + let proxyVar; + if (usingSsl) { + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; } else { - return vcsRepoFileUrl; + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; } -} -/** - * Execute a command in bash and wrap the output in a log group. - * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param commandPrefix optional string used to prefix the command to be executed. - * @param options optional exec options. See ExecOptions - * @param log_message log group title. - * @returns Promise exit code - */ -function execBashCommand(commandLine, commandPrefix, options, log_message) { - return __awaiter(this, void 0, void 0, function* () { - commandPrefix = commandPrefix || ""; - const bashScript = `${commandPrefix}${commandLine}`; - const message = log_message || `Invoking "bash -c '${bashScript}'`; - let toolRunnerCommandLine = ""; - let toolRunnerCommandLineArgs = []; - if (isWindows) { - toolRunnerCommandLine = "C:\\Windows\\system32\\cmd.exe"; - // This passes the same flags to cmd.exe that "run:" in a workflow. - // https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions#using-a-specific-shell - // Except for /D, which disables the AutoRun functionality from command prompt - // and it blocks Python virtual environment activation if one configures it in - // the previous steps. - toolRunnerCommandLineArgs = [ - "/E:ON", - "/V:OFF", - "/S", - "/C", - "call", - "%programfiles(x86)%\\Microsoft Visual Studio\\2019\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat", - "amd64", - "&", - "C:\\Program Files\\Git\\bin\\bash.exe", - "-c", - bashScript, - ]; - } - else { - toolRunnerCommandLine = "bash"; - toolRunnerCommandLineArgs = ["-c", bashScript]; - } - const runner = new tr.ToolRunner(toolRunnerCommandLine, toolRunnerCommandLineArgs, options); - return core.group(message, () => { - return runner.exec(); - }); - }); -} -exports.execBashCommand = execBashCommand; -//Determine whether all inputs name supported ROS distributions. -function validateDistros(ros1Distro, ros2Distro) { - if (!ros1Distro && !ros2Distro) { - core.setFailed(`Neither '${targetROS1DistroInput}' or '${targetROS2DistroInput}' inputs were set, at least one is required.`); - return false; + if (proxyVar) { + proxyUrl = url.parse(proxyVar); } - if (ros1Distro && validROS1Distros.indexOf(ros1Distro) <= -1) { - core.setFailed(`Input ${ros1Distro} was not a valid ROS 1 distribution for '${targetROS1DistroInput}'. Valid values: ${validROS1Distros}`); + return proxyUrl; +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { return false; } - if (ros2Distro && validROS2Distros.indexOf(ros2Distro) <= -1) { - core.setFailed(`Input ${ros2Distro} was not a valid ROS 2 distribution for '${targetROS2DistroInput}'. 
Valid values: ${validROS2Distros}`); + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { return false; } - return true; -} -exports.validateDistros = validateDistros; -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - const repo = github.context.repo; - const workspace = process.env.GITHUB_WORKSPACE; - const colconMixinName = core.getInput("colcon-mixin-name"); - const colconMixinRepo = core.getInput("colcon-mixin-repository"); - const extraCmakeArgs = core.getInput("extra-cmake-args"); - const colconExtraArgs = core.getInput("colcon-extra-args"); - const importToken = core.getInput("import-token"); - const packageName = core.getInput("package-name", { required: true }); - const packageNameList = packageName.split(RegExp("\\s")); - const rosWorkspaceName = "ros_ws"; - core.setOutput("ros-workspace-directory-name", rosWorkspaceName); - const rosWorkspaceDir = path.join(workspace, rosWorkspaceName); - const targetRos1Distro = core.getInput(targetROS1DistroInput); - const targetRos2Distro = core.getInput(targetROS2DistroInput); - const vcsRepoFileUrlListAsString = core.getInput("vcs-repo-file-url") || ""; - const vcsRepoFileUrlList = vcsRepoFileUrlListAsString.split(RegExp("\\s")); - const vcsRepoFileUrlListNonEmpty = vcsRepoFileUrlList.filter((x) => x != ""); - const vcsRepoFileUrlListResolved = vcsRepoFileUrlListNonEmpty.map((x) => resolveVcsRepoFileUrl(x)); - const coverageIgnorePattern = core.getInput("coverage-ignore-pattern"); - if (!validateDistros(targetRos1Distro, targetRos2Distro)) { - return; - } - // rosdep does not reliably work on Windows, see - // ros-infrastructure/rosdep#610 for instance. So, we do not run it. - if (!isWindows) { - yield execBashCommand("rosdep update"); - } - // Reset colcon configuration. - yield io.rmRF(path.join(os.homedir(), ".colcon")); - // Wipe out the workspace directory to ensure the workspace is always - // identical. - yield io.rmRF(rosWorkspaceDir); - // Checkout ROS 2 from source and install ROS 2 system dependencies - yield io.mkdirP(rosWorkspaceDir + "/src"); - const options = { - cwd: rosWorkspaceDir, - }; - const curlFlags = curlFlagsArray.join(" "); - for (const vcsRepoFileUrl of vcsRepoFileUrlListResolved) { - yield execBashCommand(`curl ${curlFlags} '${vcsRepoFileUrl}' | vcs import --force --recursive src/`, undefined, options); - } - // If the package under tests is part of ros.repos, remove it first. - // We do not want to allow the "default" head state of the package to - // to be present in the workspace, and colcon will fail stating it found twice - // a package with an identical name. - const posixRosWorkspaceDir = isWindows - ? rosWorkspaceDir.replace(/\\/g, "/") - : rosWorkspaceDir; - yield execBashCommand(`find "${posixRosWorkspaceDir}" -type d -and -name "${repo["repo"]}" | xargs rm -rf`); - // The repo file for the repository needs to be generated on-the-fly to - // incorporate the custom repository URL and branch name, when a PR is - // being built. 
- let repoFullName = process.env.GITHUB_REPOSITORY; - if (github.context.payload.pull_request) { - repoFullName = github.context.payload.pull_request.head.repo.full_name; - } - let tokenAuth = importToken; - if (tokenAuth !== "") { - tokenAuth = tokenAuth.concat("@"); - } - const headRef = process.env.GITHUB_HEAD_REF; - const commitRef = headRef || github.context.sha; - const repoFilePath = path.join(rosWorkspaceDir, "package.repo"); - const repoFileContent = `repositories: - ${repo["repo"]}: - type: git - url: 'https://${tokenAuth}github.com/${repoFullName}.git' - version: '${commitRef}'`; - fs_1.default.writeFileSync(repoFilePath, repoFileContent); - yield execBashCommand("vcs import --force --recursive src/ < package.repo", undefined, options); - // Remove all repositories the package under test does not depend on, to - // avoid having rosdep installing unrequired dependencies. - yield execBashCommand(`diff --new-line-format="" --unchanged-line-format="" <(colcon list -p) <(colcon list --packages-up-to ${packageNameList.join(" ")} -p) | xargs rm -rf`, undefined, options); - // Install ROS dependencies for each distribution being sourced - if (targetRos1Distro) { - yield execBashCommand(`DEBIAN_FRONTEND=noninteractive RTI_NC_LICENSE_ACCEPTED=yes rosdep install -r --from-paths src --ignore-src --rosdistro ${targetRos1Distro} -y || true`, undefined, options); - } - if (targetRos2Distro) { - yield execBashCommand(`DEBIAN_FRONTEND=noninteractive RTI_NC_LICENSE_ACCEPTED=yes rosdep install -r --from-paths src --ignore-src --rosdistro ${targetRos2Distro} -y || true`, undefined, options); - } - if (colconMixinName !== "" && colconMixinRepo !== "") { - yield execBashCommand(`colcon mixin add default '${colconMixinRepo}'`); - yield execBashCommand("colcon mixin update default"); - } - let extra_options = []; - if (colconMixinName !== "") { - extra_options = extra_options.concat(["--mixin", colconMixinName]); - } - if (colconExtraArgs !== "") { - extra_options = extra_options.concat(colconExtraArgs); - } - // Add the future install bin directory to PATH. - // This enables cmake find_package to find packages installed in the - // colcon install directory, even if local_setup.sh has not been sourced. - // - // From the find_package doc: - // https://cmake.org/cmake/help/latest/command/find_package.html - // 5. Search the standard system environment variables. - // Path entries ending in /bin or /sbin are automatically converted to - // their parent directories: - // PATH - // - // ament_cmake should handle this automatically, but we are seeing cases - // where this does not happen. See issue #26 for relevant CI logs. 
- core.addPath(path.join(rosWorkspaceDir, "install", "bin")); - // Source any installed ROS distributions if they are present - let colconCommandPrefix = ""; - if (isLinux) { - if (targetRos1Distro) { - const ros1SetupPath = `/opt/ros/${targetRos1Distro}/setup.sh`; - if (fs_1.default.existsSync(ros1SetupPath)) { - colconCommandPrefix += `source ${ros1SetupPath} && `; - } - } - if (targetRos2Distro) { - const ros2SetupPath = `/opt/ros/${targetRos2Distro}/setup.sh`; - if (fs_1.default.existsSync(ros2SetupPath)) { - colconCommandPrefix += `source ${ros2SetupPath} && `; - } - } - } - else if (isWindows) { - // Windows only supports ROS2 - if (targetRos2Distro) { - const ros2SetupPath = `c:/dev/${targetRos2Distro}/ros2-windows/setup.bat`; - if (fs_1.default.existsSync(ros2SetupPath)) { - colconCommandPrefix += `${ros2SetupPath} && `; - } - } - } - let colconBuildCmd = [ - `colcon build`, - `--event-handlers console_cohesion+`, - `--packages-up-to ${packageNameList.join(" ")}`, - `${extra_options.join(" ")}`, - `--cmake-args ${extraCmakeArgs}`, - ].join(" "); - if (!isWindows) { - colconBuildCmd = colconBuildCmd.concat(" --symlink-install"); - } - yield execBashCommand(colconBuildCmd, colconCommandPrefix, options); - // ignoreReturnCode is set to true to avoid having a lack of coverage - // data fail the build. - const colconLcovInitialCmd = "colcon lcov-result --initial"; - yield execBashCommand(colconLcovInitialCmd, colconCommandPrefix, { - cwd: rosWorkspaceDir, - ignoreReturnCode: true, - }); - const colconTestCmd = [ - `colcon test`, - `--event-handlers console_cohesion+`, - `--pytest-with-coverage`, - `--return-code-on-test-failure`, - `--packages-select ${packageNameList.join(" ")}`, - `${extra_options.join(" ")}`, - ].join(" "); - yield execBashCommand(colconTestCmd, colconCommandPrefix, options); - // ignoreReturnCode, check comment above in --initial - const colconLcovResultCmd = [ - `colcon lcov-result`, - `--filter ${coverageIgnorePattern}`, - `--packages-select ${packageNameList.join(" ")}`, - ].join(" "); - yield execBashCommand(colconLcovResultCmd, colconCommandPrefix, { - cwd: rosWorkspaceDir, - ignoreReturnCode: true, - }); - const colconCoveragepyResultCmd = [ - `colcon coveragepy-result`, - `--packages-select ${packageNameList.join(" ")}`, - ].join(" "); - yield execBashCommand(colconCoveragepyResultCmd, colconCommandPrefix, options); - core.setOutput("ros-workspace-directory-name", rosWorkspaceName); - } - catch (error) { - core.setFailed(error.message); + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (let upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; } - }); + } + return false; } -run(); +exports.checkBypass = checkBypass; /***/ }), -/***/ 877: -/***/ ((module) => { +/***/ 955: +/***/ (function(module, __unusedexports, __webpack_require__) { -module.exports = eval("require")("encoding"); +"use strict"; +const path = __webpack_require__(622); +const childProcess = __webpack_require__(129); +const crossSpawn = 
__webpack_require__(20); +const stripEof = __webpack_require__(768); +const npmRunPath = __webpack_require__(621); +const isStream = __webpack_require__(323); +const _getStream = __webpack_require__(145); +const pFinally = __webpack_require__(697); +const onExit = __webpack_require__(260); +const errname = __webpack_require__(427); +const stdio = __webpack_require__(168); + +const TEN_MEGABYTES = 1000 * 1000 * 10; + +function handleArgs(cmd, args, opts) { + let parsed; + + opts = Object.assign({ + extendEnv: true, + env: {} + }, opts); + + if (opts.extendEnv) { + opts.env = Object.assign({}, process.env, opts.env); + } -/***/ }), + if (opts.__winShell === true) { + delete opts.__winShell; + parsed = { + command: cmd, + args, + options: opts, + file: cmd, + original: { + cmd, + args + } + }; + } else { + parsed = crossSpawn._parse(cmd, args, opts); + } -/***/ 357: -/***/ ((module) => { + opts = Object.assign({ + maxBuffer: TEN_MEGABYTES, + buffer: true, + stripEof: true, + preferLocal: true, + localDir: parsed.options.cwd || process.cwd(), + encoding: 'utf8', + reject: true, + cleanup: true + }, parsed.options); + + opts.stdio = stdio(opts); + + if (opts.preferLocal) { + opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); + } -"use strict"; -module.exports = require("assert"); + if (opts.detached) { + // #115 + opts.cleanup = false; + } -/***/ }), + if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { + // #116 + parsed.args.unshift('/q'); + } -/***/ 129: -/***/ ((module) => { + return { + cmd: parsed.command, + args: parsed.args, + opts, + parsed + }; +} -"use strict"; -module.exports = require("child_process"); +function handleInput(spawned, input) { + if (input === null || input === undefined) { + return; + } -/***/ }), + if (isStream(input)) { + input.pipe(spawned.stdin); + } else { + spawned.stdin.end(input); + } +} -/***/ 614: -/***/ ((module) => { +function handleOutput(opts, val) { + if (val && opts.stripEof) { + val = stripEof(val); + } -"use strict"; -module.exports = require("events"); + return val; +} -/***/ }), +function handleShell(fn, cmd, opts) { + let file = '/bin/sh'; + let args = ['-c', cmd]; -/***/ 747: -/***/ ((module) => { + opts = Object.assign({}, opts); -"use strict"; -module.exports = require("fs"); + if (process.platform === 'win32') { + opts.__winShell = true; + file = process.env.comspec || 'cmd.exe'; + args = ['/s', '/c', `"${cmd}"`]; + opts.windowsVerbatimArguments = true; + } -/***/ }), + if (opts.shell) { + file = opts.shell; + delete opts.shell; + } -/***/ 605: -/***/ ((module) => { + return fn(file, args, opts); +} -"use strict"; -module.exports = require("http"); +function getStream(process, stream, {encoding, buffer, maxBuffer}) { + if (!process[stream]) { + return null; + } -/***/ }), + let ret; -/***/ 211: -/***/ ((module) => { + if (!buffer) { + // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 + ret = new Promise((resolve, reject) => { + process[stream] + .once('end', resolve) + .once('error', reject); + }); + } else if (encoding) { + ret = _getStream(process[stream], { + encoding, + maxBuffer + }); + } else { + ret = _getStream.buffer(process[stream], {maxBuffer}); + } -"use strict"; -module.exports = require("https"); + return ret.catch(err => { + err.stream = stream; + err.message = `${stream} ${err.message}`; + throw err; + }); +} -/***/ }), +function makeError(result, options) { + const {stdout, stderr} = result; -/***/ 631: -/***/ ((module) => { + 
let err = result.error; + const {code, signal} = result; -"use strict"; -module.exports = require("net"); + const {parsed, joinedCmd} = options; + const timedOut = options.timedOut || false; -/***/ }), + if (!err) { + let output = ''; -/***/ 87: -/***/ ((module) => { + if (Array.isArray(parsed.opts.stdio)) { + if (parsed.opts.stdio[2] !== 'inherit') { + output += output.length > 0 ? stderr : `\n${stderr}`; + } -"use strict"; -module.exports = require("os"); + if (parsed.opts.stdio[1] !== 'inherit') { + output += `\n${stdout}`; + } + } else if (parsed.opts.stdio !== 'inherit') { + output = `\n${stderr}${stdout}`; + } -/***/ }), + err = new Error(`Command failed: ${joinedCmd}${output}`); + err.code = code < 0 ? errname(code) : code; + } -/***/ 622: -/***/ ((module) => { + err.stdout = stdout; + err.stderr = stderr; + err.failed = true; + err.signal = signal || null; + err.cmd = joinedCmd; + err.timedOut = timedOut; -"use strict"; -module.exports = require("path"); + return err; +} -/***/ }), +function joinCmd(cmd, args) { + let joinedCmd = cmd; -/***/ 413: -/***/ ((module) => { + if (Array.isArray(args) && args.length > 0) { + joinedCmd += ' ' + args.join(' '); + } -"use strict"; -module.exports = require("stream"); + return joinedCmd; +} -/***/ }), +module.exports = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const {encoding, buffer, maxBuffer} = parsed.opts; + const joinedCmd = joinCmd(cmd, args); -/***/ 16: -/***/ ((module) => { + let spawned; + try { + spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); + } catch (err) { + return Promise.reject(err); + } -"use strict"; -module.exports = require("tls"); + let removeExitHandler; + if (parsed.opts.cleanup) { + removeExitHandler = onExit(() => { + spawned.kill(); + }); + } -/***/ }), + let timeoutId = null; + let timedOut = false; -/***/ 835: -/***/ ((module) => { + const cleanup = () => { + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (removeExitHandler) { + removeExitHandler(); + } + }; + + if (parsed.opts.timeout > 0) { + timeoutId = setTimeout(() => { + timeoutId = null; + timedOut = true; + spawned.kill(parsed.opts.killSignal); + }, parsed.opts.timeout); + } + + const processDone = new Promise(resolve => { + spawned.on('exit', (code, signal) => { + cleanup(); + resolve({code, signal}); + }); + + spawned.on('error', err => { + cleanup(); + resolve({error: err}); + }); + + if (spawned.stdin) { + spawned.stdin.on('error', err => { + cleanup(); + resolve({error: err}); + }); + } + }); + + function destroy() { + if (spawned.stdout) { + spawned.stdout.destroy(); + } + + if (spawned.stderr) { + spawned.stderr.destroy(); + } + } + + const handlePromise = () => pFinally(Promise.all([ + processDone, + getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), + getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) + ]).then(arr => { + const result = arr[0]; + result.stdout = arr[1]; + result.stderr = arr[2]; + + if (result.error || result.code !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed, + timedOut + }); + + // TODO: missing some timeout logic for killed + // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 + // err.killed = spawned.killed || killed; + err.killed = err.killed || spawned.killed; + + if (!parsed.opts.reject) { + return err; + } + + throw err; + } + + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: 
false, + killed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; + }), destroy); + + crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); + + handleInput(spawned, parsed.opts.input); + + spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); + spawned.catch = onrejected => handlePromise().catch(onrejected); + + return spawned; +}; + +// TODO: set `stderr: 'ignore'` when that option is implemented +module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); + +// TODO: set `stdout: 'ignore'` when that option is implemented +module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); + +module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); + +module.exports.sync = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const joinedCmd = joinCmd(cmd, args); + + if (isStream(parsed.opts.input)) { + throw new TypeError('The `input` option cannot be a stream in sync mode'); + } + + const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); + result.code = result.status; + + if (result.error || result.status !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed + }); + + if (!parsed.opts.reject) { + return err; + } + + throw err; + } + + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; +}; + +module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); -"use strict"; -module.exports = require("url"); /***/ }), -/***/ 669: -/***/ ((module) => { +/***/ 966: +/***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; -module.exports = require("util"); + +const {PassThrough} = __webpack_require__(794); + +module.exports = options => { + options = Object.assign({}, options); + + const {array} = options; + let {encoding} = options; + const buffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } + + if (buffer) { + encoding = null; + } + + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + stream.on('data', chunk => { + ret.push(chunk); + + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return ret; + } + + return buffer ? 
Buffer.concat(ret, len) : ret.join(''); + }; + + stream.getBufferedLength = () => len; + + return stream; +}; + /***/ }), -/***/ 761: -/***/ ((module) => { +/***/ 969: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var wrappy = __webpack_require__(11) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} -"use strict"; -module.exports = require("zlib"); /***/ }) -/******/ }); -/************************************************************************/ -/******/ // The module cache -/******/ var __webpack_module_cache__ = {}; -/******/ -/******/ // The require function -/******/ function __webpack_require__(moduleId) { -/******/ // Check if module is in cache -/******/ if(__webpack_module_cache__[moduleId]) { -/******/ return __webpack_module_cache__[moduleId].exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = __webpack_module_cache__[moduleId] = { -/******/ // no module.id needed -/******/ // no module.loaded needed -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete __webpack_module_cache__[moduleId]; -/******/ } -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } -/******/ -/************************************************************************/ -/******/ /* webpack/runtime/compat */ -/******/ -/******/ __webpack_require__.ab = __dirname + "/";/************************************************************************/ -/******/ // module exports must be returned from runtime so entry inlining is disabled -/******/ // startup -/******/ // Load entry module and return exports -/******/ return __webpack_require__(215); -/******/ })() -; \ No newline at end of file +/******/ }); \ No newline at end of file diff --git a/src/action-ros-ci.ts b/src/action-ros-ci.ts index 7a9cf7829..af9c73aec 100644 --- a/src/action-ros-ci.ts +++ b/src/action-ros-ci.ts @@ -85,25 +85,17 @@ export async function execBashCommand( let toolRunnerCommandLine = ""; let toolRunnerCommandLineArgs: string[] = []; if (isWindows) { - toolRunnerCommandLine = "C:\\Windows\\system32\\cmd.exe"; - // This passes the same flags to cmd.exe that "run:" in a workflow. 
-		// https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions#using-a-specific-shell
-		// Except for /D, which disables the AutoRun functionality from command prompt
-		// and it blocks Python virtual environment activation if one configures it in
-		// the previous steps.
-		toolRunnerCommandLineArgs = [
-			"/E:ON",
-			"/V:OFF",
-			"/S",
-			"/C",
-			"call",
-			"%programfiles(x86)%\\Microsoft Visual Studio\\2019\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat",
-			"amd64",
-			"&",
-			"C:\\Program Files\\Git\\bin\\bash.exe",
-			"-c",
-			bashScript,
-		];
+		const which_msbuild = await io.which("msbuild");
+		if (!which_msbuild) {
+			// TODO: mutate the provided options instead of copying them.
+			options = options || {};
+			options.env = options.env || {};
+			options.env.PATH =
+				(options.env.PATH ? options.env.PATH + ";" : "") +
+				"%programfiles(x86)%\\Microsoft Visual Studio\\2019\\Enterprise\\MSBuild\\Current\\Bin";
+		}
+		toolRunnerCommandLine = "C:\\Program Files\\Git\\bin\\bash.exe";
+		toolRunnerCommandLineArgs = ["-c", bashScript];
 	} else {
 		toolRunnerCommandLine = "bash";
 		toolRunnerCommandLineArgs = ["-c", bashScript];
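The new Windows branch above drops the cmd.exe/vcvarsall invocation: it asks io.which for msbuild and, only when that lookup comes back empty, appends the default MSBuild directory to PATH before running the script through Git Bash directly. A minimal sketch of that lookup-and-append pattern, assuming @actions/io is available; defaultMsbuildBin is a hypothetical stand-in for whichever MSBuild directory applies on the runner (the action itself hard-codes the VS 2019 Enterprise layout):

import * as io from "@actions/io";

// Hypothetical stand-in for the MSBuild directory to fall back to.
const defaultMsbuildBin =
	"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Enterprise\\MSBuild\\Current\\Bin";

// Only touch PATH when msbuild cannot already be resolved.
async function ensureMsbuildOnPath(env: { [key: string]: string }): Promise<void> {
	// io.which resolves to an empty string rather than throwing when the tool
	// is missing, because the optional "check" argument is omitted.
	const msbuildPath = await io.which("msbuild");
	if (!msbuildPath) {
		// Append rather than overwrite so existing PATH entries keep priority.
		env.PATH = (env.PATH ? env.PATH + ";" : "") + defaultMsbuildBin;
	}
}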