diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html
index 341a3fee9da0e6..f06c47f22efebc 100644
--- a/deps/npm/docs/output/commands/npm-ls.html
+++ b/deps/npm/docs/output/commands/npm-ls.html
@@ -160,7 +160,7 @@

Description

the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

-npm@8.3.2 /path/to/npm
+npm@8.4.0 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
 
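For reference, the path listing that npm ls promzard prints can also be produced programmatically. The following is a minimal sketch, not part of the patch, assuming the public @npmcli/arborist API (loadActual() and the inventory.filter() helper that also appears in the reify.js hunk further down); the findInstalled name is hypothetical.

// Sketch: list every installed copy of a named package, roughly the path
// information `npm ls <name>` reports. Assumes @npmcli/arborist's documented API.
const Arborist = require('@npmcli/arborist')

async function findInstalled (name, path = process.cwd()) {
  const arb = new Arborist({ path })
  const tree = await arb.loadActual() // read the real node_modules tree
  for (const node of tree.inventory.filter(n => n.name === name)) {
    console.log(`${node.name}@${node.version} ${node.location}`)
  }
}

findInstalled('promzard').catch(console.error)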
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index ab49c472ab6075..a3a5a3dca202e3 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -149,7 +149,7 @@

Table of contents

npm <command> [args]
 

Version

-8.3.2
+8.4.0

Description

npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/lib/commands/logout.js b/deps/npm/lib/commands/logout.js
index 4e6bab9859551c..aea5e93652b0e8 100644
--- a/deps/npm/lib/commands/logout.js
+++ b/deps/npm/lib/commands/logout.js
@@ -1,4 +1,4 @@
-const getAuth = require('npm-registry-fetch/auth.js')
+const getAuth = require('npm-registry-fetch/lib/auth.js')
 const npmFetch = require('npm-registry-fetch')
 const log = require('../utils/log-shim')
 const BaseCommand = require('../base-command.js')
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 038eb5e72ddd03..2c970ac0a37a10 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show:
 .P
 .RS 2
 .nf
-npm@8\.3\.2 /path/to/npm
+npm@8\.4\.0 /path/to/npm
 └─┬ init\-package\-json@0\.0\.4
   └── promzard@0\.1\.5
 .fi
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index bdb9becfe23402..ca601284ed2e47 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -10,7 +10,7 @@ npm <command> [args]
 .RE
 .SS Version
 .P
-8\.3\.2
+8\.4\.0
 .SS Description
 .P
 npm is the package manager for the Node JavaScript platform\. It puts
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
index f20a554bd5ee8e..0375e1851495a1 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
@@ -269,6 +269,22 @@ module.exports = cls => class IdealTreeBuilder extends cls {
     this[_complete] = !!options.complete
     this[_preferDedupe] = !!options.preferDedupe
     this[_legacyBundling] = !!options.legacyBundling
+
+    // validates list of update names, they must
+    // be dep names only, no semver ranges are supported
+    for (const name of update.names) {
+      const spec = npa(name)
+      const validationError =
+        new TypeError(`Update arguments must not contain package version specifiers
+
+Try using the package name instead, e.g:
+    npm update ${spec.name}`)
+      validationError.code = 'EUPDATEARGS'
+
+      if (spec.fetchSpec !== 'latest') {
+        throw validationError
+      }
+    }
     this[_updateNames] = update.names
     this[_updateAll] = update.all

@@ -320,7 +336,7 @@ module.exports = cls => class IdealTreeBuilder extends cls {
       // Load on a new Arborist object, so the Nodes aren't the same,
       // or else it'll get super confusing when we change them!
.then(async root => { - if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) { + if ((!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) || (this[_global] && this[_updateNames].length)) { await new this.constructor(this.options).loadActual({ root }) const tree = root.target // even though we didn't load it from a package-lock.json FILE, diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index 0d260858d81c6b..c06ed80265e027 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -212,7 +212,8 @@ module.exports = cls => class ActualLoader extends cls { const promises = [] for (const path of tree.workspaces.values()) { if (!this[_cache].has(path)) { - const p = this[_loadFSNode]({ path, root: this[_actualTree] }) + // workspace overrides use the root overrides + const p = this[_loadFSNode]({ path, root: this[_actualTree], useRootOverrides: true }) .then(node => this[_loadFSTree](node)) promises.push(p) } @@ -240,7 +241,7 @@ module.exports = cls => class ActualLoader extends cls { this[_actualTree] = root } - [_loadFSNode] ({ path, parent, real, root, loadOverrides }) { + [_loadFSNode] ({ path, parent, real, root, loadOverrides, useRootOverrides }) { if (!real) { return realpath(path, this[_rpcache], this[_stcache]) .then( @@ -250,6 +251,7 @@ module.exports = cls => class ActualLoader extends cls { real, root, loadOverrides, + useRootOverrides, }), // if realpath fails, just provide a dummy error node error => new Node({ @@ -289,6 +291,9 @@ module.exports = cls => class ActualLoader extends cls { parent, root, loadOverrides, + ...(useRootOverrides && root.overrides + ? 
{ overrides: root.overrides.getNodeRule({ name: pkg.name, version: pkg.version }) } + : {}), }) }) .then(node => { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js index d5e70323830b61..45ef93985358b4 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -5,6 +5,7 @@ const pacote = require('pacote') const AuditReport = require('../audit-report.js') const { subset, intersects } = require('semver') const npa = require('npm-package-arg') +const semver = require('semver') const debug = require('../debug.js') const walkUp = require('walk-up-path') @@ -1273,6 +1274,21 @@ module.exports = cls => class Reifier extends cls { } } + // Returns true if any of the edges from this node has a semver + // range definition that is an exact match to the version installed + // e.g: should return true if for a given an installed version 1.0.0, + // range is either =1.0.0 or 1.0.0 + const exactVersion = node => { + for (const edge of node.edgesIn) { + try { + if (semver.subset(edge.spec, node.version)) { + return false + } + } catch {} + } + return true + } + // helper that retrieves an array of nodes that were // potentially updated during the reify process, in order // to limit the number of nodes to check and update, only @@ -1284,6 +1300,8 @@ module.exports = cls => class Reifier extends cls { const filterDirectDependencies = node => !node.isRoot && node.resolveParent.isRoot && (!names || names.includes(node.name)) + && exactVersion(node) // skip update for exact ranges + const directDeps = this.idealTree.inventory .filter(filterDirectDependencies) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js index b45fea0ac61112..bb6971f7ad57ad 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -476,8 +476,13 @@ class Shrinkwrap { // all good! hidden lockfile is the newest thing in here. 
return data }).catch(er => { - const rel = relpath(this.path, this.filename) - this.log.verbose('shrinkwrap', `failed to load ${rel}`, er) + /* istanbul ignore else */ + if (typeof this.filename === 'string') { + const rel = relpath(this.path, this.filename) + this.log.verbose('shrinkwrap', `failed to load ${rel}`, er) + } else { + this.log.verbose('shrinkwrap', `failed to load ${this.path}`, er) + } this.loadingError = er this.loadedFromDisk = false this.ancientLockfile = false diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 493a0a78c5c465..5c33f71678a705 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "4.2.1", + "version": "4.3.0", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", @@ -22,7 +22,7 @@ "npm-install-checks": "^4.0.0", "npm-package-arg": "^8.1.5", "npm-pick-manifest": "^6.1.0", - "npm-registry-fetch": "^11.0.0", + "npm-registry-fetch": "^12.0.1", "pacote": "^12.0.2", "parse-conflict-json": "^2.0.1", "proc-log": "^1.0.0", diff --git a/deps/npm/node_modules/@tootallnate/once/LICENSE b/deps/npm/node_modules/@tootallnate/once/LICENSE new file mode 100644 index 00000000000000..c4c56a2a53b2fe --- /dev/null +++ b/deps/npm/node_modules/@tootallnate/once/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
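The hunks that follow vendor @tootallnate/once 2.0.0, which drops the 1.x cancelable-promise API (a promise carrying a .cancel() method) in favor of an options object holding an AbortSignal; per the compiled code below, an 'abort' simply detaches the listeners. A small usage sketch, not part of the patch; requiring the .default export is an assumption based on the exports.default assignment in dist/index.js below.

// Sketch of the @tootallnate/once 2.x API vendored in by the hunks below.
const { EventEmitter } = require('events')
const once = require('@tootallnate/once').default // assumed CJS default export

async function demo () {
  const emitter = new EventEmitter()
  const ac = new AbortController() // global in Node.js >= 15

  // v1 would have been: const p = once(emitter, 'ready'); p.cancel()
  const ready = once(emitter, 'ready', { signal: ac.signal })
  setImmediate(() => emitter.emit('ready', 'ok'))

  const [value] = await ready // resolves with the array of emitted arguments
  console.log(value) // -> 'ok'
}

demo().catch(console.error)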
diff --git a/deps/npm/node_modules/@tootallnate/once/dist/index.d.ts b/deps/npm/node_modules/@tootallnate/once/dist/index.d.ts index a7efe943b2acbc..93d02a9a348b50 100644 --- a/deps/npm/node_modules/@tootallnate/once/dist/index.d.ts +++ b/deps/npm/node_modules/@tootallnate/once/dist/index.d.ts @@ -1,14 +1,7 @@ /// import { EventEmitter } from 'events'; -declare function once(emitter: EventEmitter, name: string): once.CancelablePromise; -declare namespace once { - interface CancelFunction { - (): void; - } - interface CancelablePromise extends Promise { - cancel: CancelFunction; - } - type CancellablePromise = CancelablePromise; - function spread(emitter: EventEmitter, name: string): once.CancelablePromise; +import { EventNames, EventListenerParameters, AbortSignal } from './types'; +export interface OnceOptions { + signal?: AbortSignal; } -export = once; +export default function once>(emitter: Emitter, name: Event, { signal }?: OnceOptions): Promise>; diff --git a/deps/npm/node_modules/@tootallnate/once/dist/index.js b/deps/npm/node_modules/@tootallnate/once/dist/index.js index bfd0dc88f758b8..ca6385b1b82f88 100644 --- a/deps/npm/node_modules/@tootallnate/once/dist/index.js +++ b/deps/npm/node_modules/@tootallnate/once/dist/index.js @@ -1,39 +1,24 @@ "use strict"; -function noop() { } -function once(emitter, name) { - const o = once.spread(emitter, name); - const r = o.then((args) => args[0]); - r.cancel = o.cancel; - return r; -} -(function (once) { - function spread(emitter, name) { - let c = null; - const p = new Promise((resolve, reject) => { - function cancel() { - emitter.removeListener(name, onEvent); - emitter.removeListener('error', onError); - p.cancel = noop; - } - function onEvent(...args) { - cancel(); - resolve(args); - } - function onError(err) { - cancel(); - reject(err); - } - c = cancel; - emitter.on(name, onEvent); - emitter.on('error', onError); - }); - if (!c) { - throw new TypeError('Could not get `cancel()` function'); +Object.defineProperty(exports, "__esModule", { value: true }); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); } - p.cancel = c; - return p; - } - once.spread = spread; -})(once || (once = {})); -module.exports = once; + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports.default = once; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@tootallnate/once/dist/index.js.map b/deps/npm/node_modules/@tootallnate/once/dist/index.js.map index 30d20491dbca83..61708ca07f1b09 100644 --- a/deps/npm/node_modules/@tootallnate/once/dist/index.js.map +++ b/deps/npm/node_modules/@tootallnate/once/dist/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA,SAAS,IAAI,KAAI,CAAC;AAElB,SAAS,IAAI,CACZ,OAAqB,EACrB,IAAY;IAEZ,MAAM,CAAC,GAAG,IAAI,CAAC,MAAM,CAAM,OAAO,EAAE,IAAI,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAA8B,CAAC;IACtE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC;IACpB,OAAO,CAAC,CAAC;AACV,CAAC;AAED,WAAU,IAAI;IAWb,SAAgB,MAAM,CACrB,OAAqB,EACrB,IAAY;QAEZ,IAAI,CAAC,GAA+B,IAAI,CAAC;QACzC,MAAM,CAAC,GAAG,IAAI,OAAO,CAAI,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC5C,SAAS,MAAM;gBACd,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;gBACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;gBACzC,CAAC,CAAC,MAAM,GAAG,IAAI,CAAC;YACjB,CAAC;YACD,SAAS,OAAO,CAAC,GAAG,IAAW;gBAC9B,MAAM,EAAE,CAAC;gBACT,OAAO,CAAC,IAAS,CAAC,CAAC;YACpB,CAAC;YACD,SAAS,OAAO,CAAC,GAAU;gBAC1B,MAAM,EAAE,CAAC;gBACT,MAAM,CAAC,GAAG,CAAC,CAAC;YACb,CAAC;YACD,CAAC,GAAG,MAAM,CAAC;YACX,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC9B,CAAC,CAA8B,CAAC;QAChC,IAAI,CAAC,CAAC,EAAE;YACP,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC,CAAC;SACzD;QACD,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;QACb,OAAO,CAAC,CAAC;IACV,CAAC;IA5Be,WAAM,SA4BrB,CAAA;AACF,CAAC,EAxCS,IAAI,KAAJ,IAAI,QAwCb;AAED,iBAAS,IAAI,CAAC"} \ No newline at end of file +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAOA,SAAwB,IAAI,CAI3B,OAAgB,EAChB,IAAW,EACX,EAAE,MAAM,KAAkB,EAAE;IAE5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,SAAS,OAAO;YACf,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YAC9C,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC1C,CAAC;QACD,SAAS,OAAO,CAAC,GAAG,IAAW;YAC9B,OAAO,EAAE,CAAC;YACV,OAAO,CAAC,IAA+C,CAAC,CAAC;QAC1D,CAAC;QACD,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QACD,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC3C,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACJ,CAAC;AA1BD,uBA0BC"} \ No newline at end of file diff --git a/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts b/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts new file mode 100644 index 00000000000000..eb2bbc6c6275ec --- /dev/null +++ b/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts @@ -0,0 +1,231 @@ +export declare type OverloadedParameters = T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + 
(...args: infer A18): any; + (...args: infer A19): any; + (...args: infer A20): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 | A20 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; + (...args: infer A19): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; +} ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; +} ? A1 | A2 | A3 | A4 | A5 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; +} ? 
A1 | A2 | A3 | A4 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; +} ? A1 | A2 | A3 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; +} ? A1 | A2 : T extends { + (...args: infer A1): any; +} ? A1 : any; diff --git a/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.js b/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.js new file mode 100644 index 00000000000000..207186d9e7cca0 --- /dev/null +++ b/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=overloaded-parameters.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map b/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map new file mode 100644 index 00000000000000..863f146d625f6c --- /dev/null +++ b/deps/npm/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"overloaded-parameters.js","sourceRoot":"","sources":["../src/overloaded-parameters.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/deps/npm/node_modules/@tootallnate/once/dist/types.d.ts b/deps/npm/node_modules/@tootallnate/once/dist/types.d.ts new file mode 100644 index 00000000000000..58be8284ab8d3e --- /dev/null +++ b/deps/npm/node_modules/@tootallnate/once/dist/types.d.ts @@ -0,0 +1,17 @@ +/// +import { EventEmitter } from 'events'; +import { OverloadedParameters } from './overloaded-parameters'; +export declare type FirstParameter = T extends [infer R, ...any[]] ? R : never; +export declare type EventListener = F extends [ + T, + infer R, + ...any[] +] ? R : never; +export declare type EventParameters = OverloadedParameters; +export declare type EventNames = FirstParameter>; +export declare type EventListenerParameters> = WithDefault, Event>>, unknown[]>; +export declare type WithDefault = [T] extends [never] ? 
D : T; +export interface AbortSignal { + addEventListener: (name: string, listener: (...args: any[]) => any) => void; + removeEventListener: (name: string, listener: (...args: any[]) => any) => void; +} diff --git a/deps/npm/node_modules/@tootallnate/once/dist/types.js b/deps/npm/node_modules/@tootallnate/once/dist/types.js new file mode 100644 index 00000000000000..11e638d1ee44ae --- /dev/null +++ b/deps/npm/node_modules/@tootallnate/once/dist/types.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@tootallnate/once/dist/types.js.map b/deps/npm/node_modules/@tootallnate/once/dist/types.js.map new file mode 100644 index 00000000000000..c768b79002615c --- /dev/null +++ b/deps/npm/node_modules/@tootallnate/once/dist/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/deps/npm/node_modules/@tootallnate/once/package.json b/deps/npm/node_modules/@tootallnate/once/package.json index 8343f9fad73aba..69ce947d9c3103 100644 --- a/deps/npm/node_modules/@tootallnate/once/package.json +++ b/deps/npm/node_modules/@tootallnate/once/package.json @@ -1,6 +1,6 @@ { "name": "@tootallnate/once", - "version": "1.1.2", + "version": "2.0.0", "description": "Creates a Promise that waits for a single event", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -10,8 +10,7 @@ "scripts": { "prebuild": "rimraf dist", "build": "tsc", - "test": "mocha --reporter spec", - "test-lint": "eslint src --ext .js,.ts", + "test": "jest", "prepublishOnly": "npm run build" }, "repository": { @@ -25,21 +24,29 @@ "url": "https://github.com/TooTallNate/once/issues" }, "devDependencies": { + "@types/jest": "^27.0.2", "@types/node": "^12.12.11", - "@typescript-eslint/eslint-plugin": "1.6.0", - "@typescript-eslint/parser": "1.1.0", - "eslint": "5.16.0", - "eslint-config-airbnb": "17.1.0", - "eslint-config-prettier": "4.1.0", - "eslint-import-resolver-typescript": "1.1.1", - "eslint-plugin-import": "2.16.0", - "eslint-plugin-jsx-a11y": "6.2.1", - "eslint-plugin-react": "7.12.4", - "mocha": "^6.2.2", + "abort-controller": "^3.0.0", + "jest": "^27.2.1", "rimraf": "^3.0.0", - "typescript": "^3.7.3" + "ts-jest": "^27.0.5", + "typescript": "^4.4.3" }, "engines": { - "node": ">= 6" + "node": ">= 10" + }, + "jest": { + "preset": "ts-jest", + "globals": { + "ts-jest": { + "diagnostics": false, + "isolatedModules": true + } + }, + "verbose": false, + "testEnvironment": "node", + "testMatch": [ + "/test/**/*.test.ts" + ] } } diff --git a/deps/npm/node_modules/http-proxy-agent/dist/agent.js b/deps/npm/node_modules/http-proxy-agent/dist/agent.js index 02528505168192..aca82804314882 100644 --- a/deps/npm/node_modules/http-proxy-agent/dist/agent.js +++ b/deps/npm/node_modules/http-proxy-agent/dist/agent.js @@ -18,7 +18,7 @@ const url_1 = __importDefault(require("url")); const debug_1 = __importDefault(require("debug")); const once_1 = __importDefault(require("@tootallnate/once")); const agent_base_1 = require("agent-base"); -const debug = debug_1.default('http-proxy-agent'); +const debug = (0, debug_1.default)('http-proxy-agent'); function isHTTPS(protocol) { return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; } @@ -86,7 +86,7 @@ class HttpProxyAgent extends agent_base_1.Agent { if (parsed.port === '80') { // if port is 80, then we can remove the port so that the // ":80" portion is not on the produced URL - delete parsed.port; + parsed.port = ''; } // Change the `http.ClientRequest` instance's "path" field // to the absolute path of the URL that will be requested. @@ -136,7 +136,7 @@ class HttpProxyAgent extends agent_base_1.Agent { // function throws instead of the `http` request machinery. This is // important for i.e. `PacProxyAgent` which determines a failed proxy // connection via the `callback()` function throwing. - yield once_1.default(socket, 'connect'); + yield (0, once_1.default)(socket, 'connect'); return socket; }); } diff --git a/deps/npm/node_modules/http-proxy-agent/dist/agent.js.map b/deps/npm/node_modules/http-proxy-agent/dist/agent.js.map index 7a407620d8e50a..bd3b56aa6dfdbc 100644 --- a/deps/npm/node_modules/http-proxy-agent/dist/agent.js.map +++ b/deps/npm/node_modules/http-proxy-agent/dist/agent.js.map @@ -1 +1 @@ -{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,eAAW,CAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,OAAO,MAAM,CAAC,IAAI,CAAC;aACnB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBAC
N,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,cAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"} \ No newline at end of file +{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,MAAM,CAAC,IAAI,GAAG,EAAE,CAAC;aACjB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAA
M,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,IAAA,cAAI,EAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"} \ No newline at end of file diff --git a/deps/npm/node_modules/http-proxy-agent/package.json b/deps/npm/node_modules/http-proxy-agent/package.json index 870dd5d8af267a..659d6e11e80e40 100644 --- a/deps/npm/node_modules/http-proxy-agent/package.json +++ b/deps/npm/node_modules/http-proxy-agent/package.json @@ -1,6 +1,6 @@ { "name": "http-proxy-agent", - "version": "4.0.1", + "version": "5.0.0", "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -30,13 +30,13 @@ "url": "https://github.com/TooTallNate/node-http-proxy-agent/issues" }, "dependencies": { - "@tootallnate/once": "1", + "@tootallnate/once": "2", "agent-base": "6", "debug": "4" }, "devDependencies": { "@types/debug": "4", - "@types/node": "^12.12.11", + "@types/node": "^12.19.2", "@typescript-eslint/eslint-plugin": "1.6.0", "@typescript-eslint/parser": "1.1.0", "eslint": "5.16.0", @@ -49,7 +49,7 @@ "mocha": "^6.2.2", "proxy": "1", "rimraf": "^3.0.0", - "typescript": "^3.5.3" + "typescript": "^4.4.3" }, "engines": { "node": ">= 6" diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index 8d2ba3ad765fd2..760da6cc1be870 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -1,6 +1,6 @@ { "name": "libnpmaccess", - "version": "5.0.0", + "version": "5.0.1", "description": "programmatic library for `npm access` commands", "author": "GitHub Inc.", "license": "ISC", @@ -32,7 +32,7 @@ "aproba": "^2.0.0", "minipass": "^3.1.1", "npm-package-arg": "^8.1.2", - "npm-registry-fetch": "^11.0.0" + "npm-registry-fetch": "^12.0.1" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index ff728b5473bc96..1de0cdfe26a956 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ 
b/deps/npm/node_modules/libnpmexec/package.json @@ -1,6 +1,6 @@ { "name": "libnpmexec", - "version": "3.0.2", + "version": "3.0.3", "files": [ "bin", "lib" diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index a46de40ac9828a..4f305552732050 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -1,6 +1,6 @@ { "name": "libnpmhook", - "version": "7.0.0", + "version": "7.0.1", "description": "programmatic API for managing npm registry hooks", "main": "lib/index.js", "files": [ @@ -34,7 +34,7 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^11.0.0" + "npm-registry-fetch": "^12.0.1" }, "devDependencies": { "@npmcli/template-oss": "^2.4.2", diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index 93297c36338d27..5c4909b1c95055 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -1,6 +1,6 @@ { "name": "libnpmorg", - "version": "3.0.0", + "version": "3.0.1", "description": "Programmatic api for `npm org` commands", "author": "GitHub Inc.", "main": "lib/index.js", @@ -45,7 +45,7 @@ "homepage": "https://npmjs.com/package/libnpmorg", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^11.0.0" + "npm-registry-fetch": "^12.0.1" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index 156503af7d3dd8..3fd2d6d5a39617 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "5.0.0", + "version": "5.0.1", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", @@ -37,14 +37,14 @@ }, "repository": { "type": "git", - "url": "https://github.com/npm/libnpmpublish.git" + "url": "https://github.com/npm/cli.git" }, - "bugs": "https://github.com/npm/libnpmpublish/issues", + "bugs": "https://github.com/npm/cli/issues", "homepage": "https://npmjs.com/package/libnpmpublish", "dependencies": { "normalize-package-data": "^3.0.2", "npm-package-arg": "^8.1.2", - "npm-registry-fetch": "^11.0.0", + "npm-registry-fetch": "^12.0.1", "semver": "^7.1.3", "ssri": "^8.0.1" }, diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index 5479e41ae3c3d1..f524426dc65f84 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -1,6 +1,6 @@ { "name": "libnpmsearch", - "version": "4.0.0", + "version": "4.0.1", "description": "Programmatic API for searching in npm and compatible registries.", "author": "GitHub Inc.", "main": "lib/index.js", @@ -41,7 +41,7 @@ "bugs": "https://github.com/npm/libnpmsearch/issues", "homepage": "https://npmjs.com/package/libnpmsearch", "dependencies": { - "npm-registry-fetch": "^11.0.0" + "npm-registry-fetch": "^12.0.1" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 1264402321ee75..23903551aa8c1b 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ b/deps/npm/node_modules/libnpmteam/package.json @@ -1,7 +1,7 @@ { "name": "libnpmteam", "description": "npm Team management APIs", - "version": 
"3.0.0", + "version": "3.0.1", "author": "GitHub Inc.", "license": "ISC", "main": "lib/index.js", @@ -32,7 +32,7 @@ "homepage": "https://npmjs.com/package/libnpmteam", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^11.0.0" + "npm-registry-fetch": "^12.0.1" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" diff --git a/deps/npm/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/make-fetch-happen/LICENSE index 8d28acf866d932..1808eb2844231c 100644 --- a/deps/npm/node_modules/make-fetch-happen/LICENSE +++ b/deps/npm/node_modules/make-fetch-happen/LICENSE @@ -1,6 +1,6 @@ ISC License -Copyright (c) npm, Inc. +Copyright 2017-2022 (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the diff --git a/deps/npm/node_modules/make-fetch-happen/lib/agent.js b/deps/npm/node_modules/make-fetch-happen/lib/agent.js index 3675dd8ae981a9..095c35c5a25230 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/agent.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/agent.js @@ -50,11 +50,13 @@ function getAgent (uri, opts) { : isHttps ? require('https').globalAgent : require('http').globalAgent - if (isLambda && !pxuri) + if (isLambda && !pxuri) { return lambdaAgent + } - if (AGENT_CACHE.peek(key)) + if (AGENT_CACHE.peek(key)) { return AGENT_CACHE.get(key) + } if (pxuri) { const pxopts = isLambda ? { @@ -86,16 +88,19 @@ function getAgent (uri, opts) { function checkNoProxy (uri, opts) { const host = new url.URL(uri).hostname.split('.').reverse() let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) - if (typeof noproxy === 'string') + if (typeof noproxy === 'string') { noproxy = noproxy.split(/\s*,\s*/g) + } return noproxy && noproxy.some(no => { const noParts = no.split('.').filter(x => x).reverse() - if (!noParts.length) + if (!noParts.length) { return false + } for (let i = 0; i < noParts.length; i++) { - if (host[i] !== noParts[i]) + if (host[i] !== noParts[i]) { return false + } } return true }) @@ -104,8 +109,9 @@ function checkNoProxy (uri, opts) { module.exports.getProcessEnv = getProcessEnv function getProcessEnv (env) { - if (!env) + if (!env) { return + } let value @@ -114,8 +120,9 @@ function getProcessEnv (env) { value = process.env[e] || process.env[e.toUpperCase()] || process.env[e.toLowerCase()] - if (typeof value !== 'undefined') + if (typeof value !== 'undefined') { break + } } } @@ -141,8 +148,9 @@ function getProxyUri (uri, opts) { protocol === 'http:' && getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) ) - if (!proxy) + if (!proxy) { return null + } const parsedProxy = (typeof proxy === 'string') ? 
new url.URL(proxy) : proxy @@ -177,13 +185,14 @@ function getProxy (proxyUrl, opts, isHttps) { } if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { - if (!isHttps) + if (!isHttps) { return new HttpProxyAgent(popts) - else + } else { return new HttpsProxyAgent(popts) - } else if (proxyUrl.protocol.startsWith('socks')) + } + } else if (proxyUrl.protocol.startsWith('socks')) { return new SocksProxyAgent(popts) - else { + } else { throw Object.assign( new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), { diff --git a/deps/npm/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/make-fetch-happen/lib/cache/entry.js index a2acea156ee6f5..ae2ad8c7667f26 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/cache/entry.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/cache/entry.js @@ -52,23 +52,31 @@ const getMetadata = (request, response, options) => { url: request.url, reqHeaders: {}, resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? options.compress : request.compress, + }, } // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) + if (response.status !== 200 && response.status !== 304) { metadata.status = response.status + } for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) + if (request.headers.has(name)) { metadata.reqHeaders[name] = request.headers.get(name) + } } // if the request's host header differs from the host in the url // we need to keep it, otherwise it's just noise and we ignore it const host = request.headers.get('host') const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) + if (host && parsedUrl.host !== host) { metadata.reqHeaders.host = host + } // if the response has a vary header, make sure // we store the relevant request headers too @@ -82,25 +90,17 @@ const getMetadata = (request, response, options) => { // copy any other request headers that will vary the response const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) for (const name of varyHeaders) { - // explicitly ignore accept-encoding here - if (name !== 'accept-encoding' && request.headers.has(name)) + if (request.headers.has(name)) { metadata.reqHeaders[name] = request.headers.get(name) + } } } } for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) + if (response.headers.has(name)) { metadata.resHeaders[name] = response.headers.get(name) - } - - // we only store accept-encoding and content-encoding if the user - // has disabled automatic compression and decompression in minipass-fetch - // since if it's enabled (the default) then the content will have - // already been decompressed making the header a lie - if (options.compress === false) { - metadata.reqHeaders['accept-encoding'] = request.headers.get('accept-encoding') - metadata.resHeaders['content-encoding'] = response.headers.get('content-encoding') + } } return metadata @@ -121,8 +121,9 @@ class CacheEntry { // entry timestamp to determine staleness because cacache will update it // when it verifies its data this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else + } else { this.key = cacheKey(request) + } this.options = options @@ -143,9 +144,17 @@ class CacheEntry { return entryA.policy.satisfies(entryB.request) }, { validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + 
entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) + if (entry.integrity === null) { return !!(entry.metadata && entry.metadata.status) + } return true }, @@ -158,8 +167,9 @@ class CacheEntry { // a cache mode of 'reload' means to behave as though we have no cache // on the way to the network. return undefined to allow cacheFetch to // create a brand new request no matter what. - if (options.cache === 'reload') + if (options.cache === 'reload') { return + } // find the specific entry that satisfies the request let match @@ -194,6 +204,7 @@ class CacheEntry { this[_request] = new Request(this.entry.metadata.url, { method: 'GET', headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, }) } @@ -235,7 +246,11 @@ class CacheEntry { // if we got a status other than 200, 301, or 308, // or the CachePolicy forbid storage, append the // cache status header and return it untouched - if (this.request.method !== 'GET' || ![200, 301, 308].includes(this.response.status) || !this.policy.storable()) { + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { this.response.headers.set('x-local-cache-status', 'skip') return this.response } @@ -276,7 +291,8 @@ class CacheEntry { abortStream = collector collector.on('collect', (data) => { // TODO if the cache write fails, log a warning but return the response anyway - cacache.put(this.options.cachePath, this.key, data, cacheOpts).then(cacheWriteResolve, cacheWriteReject) + cacache.put(this.options.cachePath, this.key, data, cacheOpts) + .then(cacheWriteResolve, cacheWriteReject) }) body.unshift(collector) body.unshift(this.response.body) @@ -305,8 +321,9 @@ class CacheEntry { // know to be invalid to the cache abortStream.destroy(err) }) - } else + } else { await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } // note: we do not set the x-local-cache-hash header because we do not know // the hash value until after the write to the cache completes, which doesn't @@ -347,25 +364,37 @@ class CacheEntry { onResume = async () => { removeOnResume() try { - const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + const content = await cacache.get.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) body.end(content) } catch (err) { - if (err.code === 'EINTEGRITY') - await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + if (err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { await CacheEntry.invalidate(this.request, this.options) + } body.emit('error', err) } } } else { onResume = () => { - const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) cacheStream.on('error', async (err) => { cacheStream.pause() - if (err.code === 'EINTEGRITY') - await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: 
this.options.memoize }) - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + if (err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { await CacheEntry.invalidate(this.request, this.options) + } body.emit('error', err) cacheStream.resume() }) @@ -415,8 +444,9 @@ class CacheEntry { // if the network fetch fails, return the stale // cached response unless it has a cache-control // of 'must-revalidate' - if (!this.policy.mustRevalidate) + if (!this.policy.mustRevalidate) { return this.respond(request.method, options, 'stale') + } throw err } @@ -429,8 +459,12 @@ class CacheEntry { // in the old cache entry to the new one, if the new metadata does not already // include that header for (const name of KEEP_RESPONSE_HEADERS) { - if (!hasOwnProperty(metadata.resHeaders, name) && hasOwnProperty(this.entry.metadata.resHeaders, name)) + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } } try { diff --git a/deps/npm/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/make-fetch-happen/lib/cache/errors.js index 31e97c4b033c09..67a66573bebe66 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/cache/errors.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/cache/errors.js @@ -1,5 +1,6 @@ class NotCachedError extends Error { constructor (url) { + /* eslint-disable-next-line max-len */ super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) this.code = 'ENOTCACHED' } diff --git a/deps/npm/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/make-fetch-happen/lib/cache/index.js index cca93d9b4eb5d3..17a6425592bcf7 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/cache/index.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/cache/index.js @@ -8,8 +8,9 @@ const cacheFetch = async (request, options) => { const entry = await CacheEntry.find(request, options) if (!entry) { // no cached result, if the cache mode is 'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') + if (options.cache === 'only-if-cached') { throw new NotCachedError(request.url) + } // otherwise, we make a request, store it and return it const response = await remote(request, options) @@ -19,8 +20,9 @@ const cacheFetch = async (request, options) => { // we have a cached response that satisfies this request, however if the cache // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') + if (options.cache === 'no-cache') { return entry.revalidate(request, options) + } // if the cached entry is not stale, or if the cache mode is 'force-cache' or // 'only-if-cached' we can respond with the cached entry. set the status @@ -28,16 +30,18 @@ const cacheFetch = async (request, options) => { const _needsRevalidation = entry.policy.needsRevalidation(request) if (options.cache === 'force-cache' || options.cache === 'only-if-cached' || - !_needsRevalidation) + !_needsRevalidation) { return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } // if we got here, the cache entry is stale so revalidate it return entry.revalidate(request, options) } cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) + if (!options.cachePath) { return + } return CacheEntry.invalidate(request, options) } diff --git a/deps/npm/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/make-fetch-happen/lib/cache/policy.js index e0959f64ddf9df..ada3c8600dae92 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/cache/policy.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/cache/policy.js @@ -2,19 +2,6 @@ const CacheSemantics = require('http-cache-semantics') const Negotiator = require('negotiator') const ssri = require('ssri') -// HACK: negotiator lazy loads several of its own modules -// as a micro optimization. we need to be sure that they're -// in memory as soon as possible at startup so that we do -// not try to lazy load them after the directory has been -// retired during a self update of the npm CLI, we do this -// by calling all of the methods that trigger a lazy load -// on a fake instance. -const preloadNegotiator = new Negotiator({ headers: {} }) -preloadNegotiator.charsets() -preloadNegotiator.encodings() -preloadNegotiator.languages() -preloadNegotiator.mediaTypes() - // options passed to http-cache-semantics constructor const policyOptions = { shared: false, @@ -31,6 +18,7 @@ const requestObject = (request) => { method: request.method, url: request.url, headers: {}, + compress: request.compress, } request.headers.forEach((value, key) => { @@ -74,16 +62,19 @@ class CachePolicy { // static method to quickly determine if a request alone is storable static storable (request, options) { // no cachePath means no caching - if (!options.cachePath) + if (!options.cachePath) { return false + } // user explicitly asked not to cache - if (options.cache === 'no-store') + if (options.cache === 'no-store') { return false + } // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) + if (!['GET', 'HEAD'].includes(request.method)) { return false + } // otherwise, let http-cache-semantics make the decision // based on the request's headers @@ -94,23 +85,32 @@ class CachePolicy { // returns true if the policy satisfies the request satisfies (request) { const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { return false + } const negotiatorA = new Negotiator(this.request) const negotiatorB = new Negotiator(_req) - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { return false + } - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { return false + } - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { return false + } - if (this.options.integrity) + if (this.options.integrity) { return ssri.parse(this.options.integrity).match(this.entry.integrity) + } return true } diff --git a/deps/npm/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/make-fetch-happen/lib/fetch.js index 
dfded79295da1d..233ba67e165502 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/fetch.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/fetch.js @@ -13,20 +13,28 @@ const remote = require('./remote.js') // in the fetch being rejected if the redirect is // possible but invalid for some reason const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) + if (!isRedirect(response.status)) { return false + } - if (options.redirect === 'manual') + if (options.redirect === 'manual') { return false + } - if (options.redirect === 'error') - throw new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect', { code: 'ENOREDIRECT' }) + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } - if (!response.headers.has('location')) - throw new FetchError(`redirect location header missing for: ${request.url}`, 'no-location', { code: 'EINVALIDREDIRECT' }) + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } - if (request.counter >= request.follow) - throw new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect', { code: 'EMAXREDIRECT' }) + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } return true } @@ -39,26 +47,34 @@ const getRedirect = (request, response, options) => { const location = response.headers.get('location') const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) // Comment below is used under the following license: - // Copyright (c) 2010-2012 Mikeal Rogers - // Licensed under the Apache License, Version 2.0 (the "License"); - // you may not use this file except in compliance with the License. - // You may obtain a copy of the License at - // http://www.apache.org/licenses/LICENSE-2.0 - // Unless required by applicable law or agreed to in writing, - // software distributed under the License is distributed on an "AS - // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - // express or implied. See the License for the specific language - // governing permissions and limitations under the License. + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ // Remove authorization if changing hostnames (but not if just // changing ports or protocols). 
This matches the behavior of request: // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { request.headers.delete('authorization') + request.headers.delete('cookie') + } // for POST request with 301/302 response, or any request with 303 response, // use GET when following redirect - if (response.status === 303 || (request.method === 'POST' && [301, 302].includes(response.status))) { + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { _opts.method = 'GET' _opts.body = null request.headers.delete('content-length') @@ -87,11 +103,13 @@ const fetch = async (request, options) => { // request url if (!['GET', 'HEAD'].includes(request.method) && response.status >= 200 && - response.status <= 399) + response.status <= 399) { await cache.invalidate(request, options) + } - if (!canFollowRedirect(request, response, options)) + if (!canFollowRedirect(request, response, options)) { return response + } const redirect = getRedirect(request, response, options) return fetch(redirect.request, redirect.options) diff --git a/deps/npm/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/make-fetch-happen/lib/options.js index f6138e6e1d13a6..a0c8664adf02aa 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/options.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/options.js @@ -7,36 +7,40 @@ const conditionalHeaders = [ ] const configureOptions = (opts) => { - const {strictSSL, ...options} = { ...opts } + const { strictSSL, ...options } = { ...opts } options.method = options.method ? options.method.toUpperCase() : 'GET' options.rejectUnauthorized = strictSSL !== false - if (!options.retry) + if (!options.retry) { options.retry = { retries: 0 } - else if (typeof options.retry === 'string') { + } else if (typeof options.retry === 'string') { const retries = parseInt(options.retry, 10) - if (isFinite(retries)) + if (isFinite(retries)) { options.retry = { retries } - else + } else { options.retry = { retries: 0 } - } else if (typeof options.retry === 'number') + } + } else if (typeof options.retry === 'number') { options.retry = { retries: options.retry } - else + } else { options.retry = { retries: 0, ...options.retry } + } options.cache = options.cache || 'default' if (options.cache === 'default') { const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { return conditionalHeaders.includes(name.toLowerCase()) }) - if (hasConditionalHeader) + if (hasConditionalHeader) { options.cache = 'no-store' + } } // cacheManager is deprecated, but if it's set and // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) + if (options.cacheManager && !options.cachePath) { options.cachePath = options.cacheManager + } return options } diff --git a/deps/npm/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/make-fetch-happen/lib/remote.js index 7e4ed24edb5304..a8b8d2a0198d40 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/remote.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/remote.js @@ -29,11 +29,13 @@ const RETRY_TYPES = [ // and verifying response integrity const remoteFetch = (request, options) => { const agent = getAgent(request.url, options) - if (!request.headers.has('connection')) + if (!request.headers.has('connection')) { request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } - if (!request.headers.has('user-agent')) + if (!request.headers.has('user-agent')) { request.headers.set('user-agent', USER_AGENT) + } // keep our own options since we're overriding the agent // and the redirect mode @@ -64,8 +66,9 @@ const remoteFetch = (request, options) => { ([408, 420, 429].includes(res.status) || res.status >= 500) if (isRetriable) { - if (typeof options.onRetry === 'function') + if (typeof options.onRetry === 'function') { options.onRetry(res) + } return retryHandler(res) } @@ -82,18 +85,21 @@ const remoteFetch = (request, options) => { const isRetryError = err.retried instanceof fetch.Response || (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - if (req.method === 'POST' || isRetryError) + if (req.method === 'POST' || isRetryError) { throw err + } - if (typeof options.onRetry === 'function') + if (typeof options.onRetry === 'function') { options.onRetry(err) + } return retryHandler(err) } }, options.retry).catch((err) => { // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') + if (err.status >= 400 && err.type !== 'system') { return err + } throw err }) diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json index dae7b37da40691..7b61953e56f575 100644 --- a/deps/npm/node_modules/make-fetch-happen/package.json +++ b/deps/npm/node_modules/make-fetch-happen/package.json @@ -1,20 +1,23 @@ { "name": "make-fetch-happen", - "version": "9.1.0", + "version": "10.0.0", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ + "bin", "lib" ], "scripts": { - "preversion": "npm t", + "preversion": "npm test", "postversion": "npm publish", - "prepublishOnly": "git push --follow-tags", + "prepublishOnly": "git push origin --follow-tags", "test": "tap", "posttest": "npm run lint", "eslint": "eslint", - "lint": "npm run eslint -- lib test", - "lintfix": "npm run lint -- --fix" + "lint": "eslint '**/*.js'", + "lintfix": "npm run lint -- --fix", + "postlint": "npm-template-check", + "snap": "tap" }, "repository": "https://github.com/npm/make-fetch-happen", "keywords": [ @@ -26,17 +29,13 @@ "cache", "subresource integrity" ], - "author": { - "name": "Kat Marchán", - "email": "kzm@zkat.tech", - "twitter": "maybekatz" - }, + "author": "GitHub Inc.", "license": "ISC", "dependencies": { "agentkeepalive": "^4.1.3", "cacache": "^15.2.0", "http-cache-semantics": "^4.1.0", - "http-proxy-agent": "^4.0.1", + "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", "lru-cache": "^6.0.0", @@ -45,20 +44,17 @@ "minipass-fetch": "^1.3.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.2", + "negotiator": "^0.6.3", "promise-retry": "^2.0.1", "socks-proxy-agent": "^6.0.0", "ssri": "^8.0.0" }, "devDependencies": { - "eslint": "^7.26.0", - "eslint-plugin-import": "^2.23.2", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", - "eslint-plugin-standard": "^5.0.0", + "@npmcli/template-oss": "^2.5.1", + "eslint": "^8.7.0", "mkdirp": "^1.0.4", "nock": "^13.0.11", - "npmlog": "^5.0.0", + "npmlog": "^6.0.0", "require-inject": "^1.4.2", "rimraf": "^3.0.2", "safe-buffer": "^5.2.1", @@ -66,11 +62,14 @@ "tap": "^15.0.9" }, "engines": { - "node": ">= 10" + "node": "^12.13.0 || ^14.15.0 || >=16" }, "tap": { "color": 1, "files": "test/*.js", "check-coverage": true + }, + "templateOSS": { + "version": "2.5.1" } } diff --git 
a/deps/npm/node_modules/negotiator/HISTORY.md b/deps/npm/node_modules/negotiator/HISTORY.md index 6d06c76aaa9650..a9a544914c43bb 100644 --- a/deps/npm/node_modules/negotiator/HISTORY.md +++ b/deps/npm/node_modules/negotiator/HISTORY.md @@ -1,3 +1,8 @@ +0.6.3 / 2022-01-22 +================== + + * Revert "Lazy-load modules from main entry point" + 0.6.2 / 2019-04-29 ================== diff --git a/deps/npm/node_modules/negotiator/index.js b/deps/npm/node_modules/negotiator/index.js index 8d4f6a226cb0d8..4788264b16c9f2 100644 --- a/deps/npm/node_modules/negotiator/index.js +++ b/deps/npm/node_modules/negotiator/index.js @@ -8,12 +8,10 @@ 'use strict'; -/** - * Cached loaded submodules. - * @private - */ - -var modules = Object.create(null); +var preferredCharsets = require('./lib/charset') +var preferredEncodings = require('./lib/encoding') +var preferredLanguages = require('./lib/language') +var preferredMediaTypes = require('./lib/mediaType') /** * Module exports. @@ -43,7 +41,6 @@ Negotiator.prototype.charset = function charset(available) { }; Negotiator.prototype.charsets = function charsets(available) { - var preferredCharsets = loadModule('charset').preferredCharsets; return preferredCharsets(this.request.headers['accept-charset'], available); }; @@ -53,7 +50,6 @@ Negotiator.prototype.encoding = function encoding(available) { }; Negotiator.prototype.encodings = function encodings(available) { - var preferredEncodings = loadModule('encoding').preferredEncodings; return preferredEncodings(this.request.headers['accept-encoding'], available); }; @@ -63,7 +59,6 @@ Negotiator.prototype.language = function language(available) { }; Negotiator.prototype.languages = function languages(available) { - var preferredLanguages = loadModule('language').preferredLanguages; return preferredLanguages(this.request.headers['accept-language'], available); }; @@ -73,7 +68,6 @@ Negotiator.prototype.mediaType = function mediaType(available) { }; Negotiator.prototype.mediaTypes = function mediaTypes(available) { - var preferredMediaTypes = loadModule('mediaType').preferredMediaTypes; return preferredMediaTypes(this.request.headers.accept, available); }; @@ -86,39 +80,3 @@ Negotiator.prototype.preferredLanguage = Negotiator.prototype.language; Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages; Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType; Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes; - -/** - * Load the given module. 
- * @private - */ - -function loadModule(moduleName) { - var module = modules[moduleName]; - - if (module !== undefined) { - return module; - } - - // This uses a switch for static require analysis - switch (moduleName) { - case 'charset': - module = require('./lib/charset'); - break; - case 'encoding': - module = require('./lib/encoding'); - break; - case 'language': - module = require('./lib/language'); - break; - case 'mediaType': - module = require('./lib/mediaType'); - break; - default: - throw new Error('Cannot find module \'' + moduleName + '\''); - } - - // Store to prevent invoking require() - modules[moduleName] = module; - - return module; -} diff --git a/deps/npm/node_modules/negotiator/lib/language.js b/deps/npm/node_modules/negotiator/lib/language.js index 62f737f0060219..a23167252719be 100644 --- a/deps/npm/node_modules/negotiator/lib/language.js +++ b/deps/npm/node_modules/negotiator/lib/language.js @@ -54,9 +54,9 @@ function parseLanguage(str, i) { var match = simpleLanguageRegExp.exec(str); if (!match) return null; - var prefix = match[1], - suffix = match[2], - full = prefix; + var prefix = match[1] + var suffix = match[2] + var full = prefix if (suffix) full += "-" + suffix; diff --git a/deps/npm/node_modules/negotiator/package.json b/deps/npm/node_modules/negotiator/package.json index 0c7ff3c2e64682..297635f6d34177 100644 --- a/deps/npm/node_modules/negotiator/package.json +++ b/deps/npm/node_modules/negotiator/package.json @@ -1,7 +1,7 @@ { "name": "negotiator", "description": "HTTP content negotiation", - "version": "0.6.2", + "version": "0.6.3", "contributors": [ "Douglas Christopher Wilson ", "Federico Romero ", @@ -18,10 +18,10 @@ ], "repository": "jshttp/negotiator", "devDependencies": { - "eslint": "5.16.0", - "eslint-plugin-markdown": "1.0.0", - "mocha": "6.1.4", - "nyc": "14.0.0" + "eslint": "7.32.0", + "eslint-plugin-markdown": "2.2.1", + "mocha": "9.1.3", + "nyc": "15.1.0" }, "files": [ "lib/", @@ -34,9 +34,9 @@ "node": ">= 0.6" }, "scripts": { - "lint": "eslint --plugin markdown --ext js,md .", + "lint": "eslint .", "test": "mocha --reporter spec --check-leaks --bail test/", - "test-cov": "nyc --reporter=html --reporter=text npm test", - "test-travis": "nyc --reporter=text npm test" + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" } } diff --git a/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts new file mode 100644 index 00000000000000..a7efe943b2acbc --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts @@ -0,0 +1,14 @@ +/// +import { EventEmitter } from 'events'; +declare function once(emitter: EventEmitter, name: string): once.CancelablePromise; +declare namespace once { + interface CancelFunction { + (): void; + } + interface CancelablePromise extends Promise { + cancel: CancelFunction; + } + type CancellablePromise = CancelablePromise; + function spread(emitter: EventEmitter, name: string): once.CancelablePromise; +} +export = once; diff --git a/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js new file mode 100644 index 00000000000000..bfd0dc88f758b8 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js @@ -0,0 +1,39 @@ +"use strict"; +function noop() { } +function once(emitter, name) 
{ + const o = once.spread(emitter, name); + const r = o.then((args) => args[0]); + r.cancel = o.cancel; + return r; +} +(function (once) { + function spread(emitter, name) { + let c = null; + const p = new Promise((resolve, reject) => { + function cancel() { + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + p.cancel = noop; + } + function onEvent(...args) { + cancel(); + resolve(args); + } + function onError(err) { + cancel(); + reject(err); + } + c = cancel; + emitter.on(name, onEvent); + emitter.on('error', onError); + }); + if (!c) { + throw new TypeError('Could not get `cancel()` function'); + } + p.cancel = c; + return p; + } + once.spread = spread; +})(once || (once = {})); +module.exports = once; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map new file mode 100644 index 00000000000000..30d20491dbca83 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA,SAAS,IAAI,KAAI,CAAC;AAElB,SAAS,IAAI,CACZ,OAAqB,EACrB,IAAY;IAEZ,MAAM,CAAC,GAAG,IAAI,CAAC,MAAM,CAAM,OAAO,EAAE,IAAI,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAA8B,CAAC;IACtE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC;IACpB,OAAO,CAAC,CAAC;AACV,CAAC;AAED,WAAU,IAAI;IAWb,SAAgB,MAAM,CACrB,OAAqB,EACrB,IAAY;QAEZ,IAAI,CAAC,GAA+B,IAAI,CAAC;QACzC,MAAM,CAAC,GAAG,IAAI,OAAO,CAAI,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC5C,SAAS,MAAM;gBACd,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;gBACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;gBACzC,CAAC,CAAC,MAAM,GAAG,IAAI,CAAC;YACjB,CAAC;YACD,SAAS,OAAO,CAAC,GAAG,IAAW;gBAC9B,MAAM,EAAE,CAAC;gBACT,OAAO,CAAC,IAAS,CAAC,CAAC;YACpB,CAAC;YACD,SAAS,OAAO,CAAC,GAAU;gBAC1B,MAAM,EAAE,CAAC;gBACT,MAAM,CAAC,GAAG,CAAC,CAAC;YACb,CAAC;YACD,CAAC,GAAG,MAAM,CAAC;YACX,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC9B,CAAC,CAA8B,CAAC;QAChC,IAAI,CAAC,CAAC,EAAE;YACP,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC,CAAC;SACzD;QACD,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;QACb,OAAO,CAAC,CAAC;IACV,CAAC;IA5Be,WAAM,SA4BrB,CAAA;AACF,CAAC,EAxCS,IAAI,KAAJ,IAAI,QAwCb;AAED,iBAAS,IAAI,CAAC"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/package.json b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/package.json new file mode 100644 index 00000000000000..8343f9fad73aba --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@tootallnate/once/package.json @@ -0,0 +1,45 @@ +{ + "name": "@tootallnate/once", + "version": "1.1.2", + "description": "Creates a Promise that waits for a single event", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha --reporter spec", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/once.git" + }, + "keywords": [], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/once/issues" + }, + "devDependencies": { + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + 
"@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "rimraf": "^3.0.0", + "typescript": "^3.7.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts new file mode 100644 index 00000000000000..3f043f7f9f7561 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts @@ -0,0 +1,32 @@ +/// +import net from 'net'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { HttpProxyAgentOptions } from '.'; +interface HttpProxyAgentClientRequest extends ClientRequest { + path: string; + output?: string[]; + outputData?: { + data: string; + }[]; + _header?: string | null; + _implicitHeader(): void; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +export default class HttpProxyAgent extends Agent { + private secureProxy; + private proxy; + constructor(_opts: string | HttpProxyAgentOptions); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req: HttpProxyAgentClientRequest, opts: RequestOptions): Promise; +} +export {}; diff --git a/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js new file mode 100644 index 00000000000000..02528505168192 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js @@ -0,0 +1,145 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const net_1 = __importDefault(require("net")); +const tls_1 = __importDefault(require("tls")); +const url_1 = __importDefault(require("url")); +const debug_1 = __importDefault(require("debug")); +const once_1 = __importDefault(require("@tootallnate/once")); +const agent_base_1 = require("agent-base"); +const debug = debug_1.default('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
+ * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + delete parsed.port; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. 
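// Usage sketch (illustrative only; the proxy URL and target host below are
// placeholders): the module's default export is a factory that accepts a proxy
// URL string or an options object and returns an `http.Agent` subclass, so the
// agent can be passed straight to Node's `http` client to route plain HTTP
// requests through the proxy.
//
//   const http = require('http')
//   const HttpProxyAgent = require('http-proxy-agent')
//   const agent = new HttpProxyAgent('http://proxy.example.com:8080')
//   http.get({ host: 'example.com', path: '/', agent }, (res) => {
//     console.log('proxied response status:', res.statusCode)
//   })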
+ if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield once_1.default(socket, 'connect'); + return socket; + }); + } +} +exports.default = HttpProxyAgent; +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map new file mode 100644 index 00000000000000..7a407620d8e50a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,eAAW,CAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,
CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,OAAO,MAAM,CAAC,IAAI,CAAC;aACnB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,cAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts new file mode 100644 index 00000000000000..24bdb52efcedcb --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts @@ -0,0 +1,21 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import _HttpProxyAgent from './agent'; +declare function createHttpProxyAgent(opts: string | createHttpProxyAgent.HttpProxyAgentOptions): _HttpProxyAgent; +declare namespace createHttpProxyAgent { + interface BaseHttpProxyAgentOptions { + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpProxyAgentOptions extends AgentOptions, BaseHttpProxyAgentOptions, Partial> { + } + export type HttpProxyAgent = _HttpProxyAgent; + export const HttpProxyAgent: typeof _HttpProxyAgent; + export {}; +} +export = createHttpProxyAgent; diff --git a/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js new file mode 100644 index 00000000000000..0a71180594605b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map new file mode 100644 index 00000000000000..e07dae5b08455a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAIA,oDAAsC;AAEtC,SAAS,oBAAoB,CAC5B,IAAyD;IAEzD,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC;AAClC,CAAC;AAED,WAAU,oBAAoB;IAmBhB,mCAAc,GAAG,eAAe,CAAC;IAE9C,oBAAoB,CAAC,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;AAC5D,CAAC,EAtBS,oBAAoB,KAApB,oBAAoB,QAsB7B;AAED,iBAAS,oBAAoB,CAAC"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/package.json b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/package.json new file mode 100644 index 00000000000000..870dd5d8af267a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/http-proxy-agent/package.json @@ -0,0 +1,57 @@ +{ + "name": "http-proxy-agent", + "version": "4.0.1", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-http-proxy-agent.git" + }, + "keywords": [ + "http", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-http-proxy-agent/issues" + }, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^3.5.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js new file mode 100644 index 00000000000000..3675dd8ae981a9 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js @@ -0,0 +1,194 @@ +'use strict' +const LRU = require('lru-cache') +const url = require('url') +const isLambda = require('is-lambda') + +const AGENT_CACHE = new LRU({ max: 50 }) +const HttpAgent = require('agentkeepalive') +const HttpsAgent = HttpAgent.HttpsAgent + +module.exports = getAgent + +const getAgentTimeout = timeout => + typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 + +const getMaxSockets = maxSockets => maxSockets || 15 + +function getAgent (uri, opts) { + const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) + const isHttps = parsedUri.protocol === 'https:' + const pxuri = getProxyUri(parsedUri.href, opts) + + // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout + // of zero disables the timeout behavior (OS limits still apply). Else, if + // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that + // the node-fetch-npm timeout will always fire first, giving us more + // consistent errors. + const agentTimeout = getAgentTimeout(opts.timeout) + const agentMaxSockets = getMaxSockets(opts.maxSockets) + + const key = [ + `https:${isHttps}`, + pxuri + ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` + : '>no-proxy<', + `local-address:${opts.localAddress || '>no-local-address<'}`, + `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, + `ca:${(isHttps && opts.ca) || '>no-ca<'}`, + `cert:${(isHttps && opts.cert) || '>no-cert<'}`, + `key:${(isHttps && opts.key) || '>no-key<'}`, + `timeout:${agentTimeout}`, + `maxSockets:${agentMaxSockets}`, + ].join(':') + + if (opts.agent != null) { // `agent: false` has special behavior! + return opts.agent + } + + // keep alive in AWS lambda makes no sense + const lambdaAgent = !isLambda ? null + : isHttps ? require('https').globalAgent + : require('http').globalAgent + + if (isLambda && !pxuri) + return lambdaAgent + + if (AGENT_CACHE.peek(key)) + return AGENT_CACHE.get(key) + + if (pxuri) { + const pxopts = isLambda ? { + ...opts, + agent: lambdaAgent, + } : opts + const proxy = getProxy(pxuri, pxopts, isHttps) + AGENT_CACHE.set(key, proxy) + return proxy + } + + const agent = isHttps ? 
new HttpsAgent({ + maxSockets: agentMaxSockets, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + localAddress: opts.localAddress, + rejectUnauthorized: opts.rejectUnauthorized, + timeout: agentTimeout, + }) : new HttpAgent({ + maxSockets: agentMaxSockets, + localAddress: opts.localAddress, + timeout: agentTimeout, + }) + AGENT_CACHE.set(key, agent) + return agent +} + +function checkNoProxy (uri, opts) { + const host = new url.URL(uri).hostname.split('.').reverse() + let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) + if (typeof noproxy === 'string') + noproxy = noproxy.split(/\s*,\s*/g) + + return noproxy && noproxy.some(no => { + const noParts = no.split('.').filter(x => x).reverse() + if (!noParts.length) + return false + for (let i = 0; i < noParts.length; i++) { + if (host[i] !== noParts[i]) + return false + } + return true + }) +} + +module.exports.getProcessEnv = getProcessEnv + +function getProcessEnv (env) { + if (!env) + return + + let value + + if (Array.isArray(env)) { + for (const e of env) { + value = process.env[e] || + process.env[e.toUpperCase()] || + process.env[e.toLowerCase()] + if (typeof value !== 'undefined') + break + } + } + + if (typeof env === 'string') { + value = process.env[env] || + process.env[env.toUpperCase()] || + process.env[env.toLowerCase()] + } + + return value +} + +module.exports.getProxyUri = getProxyUri +function getProxyUri (uri, opts) { + const protocol = new url.URL(uri).protocol + + const proxy = opts.proxy || + ( + protocol === 'https:' && + getProcessEnv('https_proxy') + ) || + ( + protocol === 'http:' && + getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) + ) + if (!proxy) + return null + + const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy + + return !checkNoProxy(uri, opts) && parsedProxy +} + +const getAuth = u => + u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) + : u.username ? 
decodeURIComponent(u.username) + : null + +const getPath = u => u.pathname + u.search + u.hash + +const HttpProxyAgent = require('http-proxy-agent') +const HttpsProxyAgent = require('https-proxy-agent') +const SocksProxyAgent = require('socks-proxy-agent') +module.exports.getProxy = getProxy +function getProxy (proxyUrl, opts, isHttps) { + const popts = { + host: proxyUrl.hostname, + port: proxyUrl.port, + protocol: proxyUrl.protocol, + path: getPath(proxyUrl), + auth: getAuth(proxyUrl), + ca: opts.ca, + cert: opts.cert, + key: opts.key, + timeout: getAgentTimeout(opts.timeout), + localAddress: opts.localAddress, + maxSockets: getMaxSockets(opts.maxSockets), + rejectUnauthorized: opts.rejectUnauthorized, + } + + if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { + if (!isHttps) + return new HttpProxyAgent(popts) + else + return new HttpsProxyAgent(popts) + } else if (proxyUrl.protocol.startsWith('socks')) + return new SocksProxyAgent(popts) + else { + throw Object.assign( + new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), + { + url: proxyUrl.href, + } + ) + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 00000000000000..a2acea156ee6f5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,460 @@ +const { Request, Response } = require('minipass-fetch') +const Minipass = require('minipass') +const MinipassCollect = require('minipass-collect') +const MinipassFlush = require('minipass-flush') +const MinipassPipeline = require('minipass-pipeline') +const cacache = require('cacache') +const url = require('url') + +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// maximum amount of data we will buffer into memory +// if we'll exceed this, we switch to streaming +const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) + metadata.status = response.status + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) + metadata.reqHeaders[name] = request.headers.get(name) + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore 
it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) + metadata.reqHeaders.host = host + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + // explicitly ignore accept-encoding here + if (name !== 'accept-encoding' && request.headers.has(name)) + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) + metadata.resHeaders[name] = response.headers.get(name) + } + + // we only store accept-encoding and content-encoding if the user + // has disabled automatic compression and decompression in minipass-fetch + // since if it's enabled (the default) then the content will have + // already been decompressed making the header a lie + if (options.compress === false) { + metadata.reqHeaders['accept-encoding'] = request.headers.get('accept-encoding') + metadata.resHeaders['content-encoding'] = response.headers.get('content-encoding') + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else + this.key = cacheKey(request) + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) + return !!(entry.metadata && entry.metadata.status) + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. 
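// Illustrative sketch of how a caller selects these cache modes (the cache
// path and URL are placeholders; `defaults()`, `cachePath` and `cache` are
// documented make-fetch-happen options):
//
//   const fetch = require('make-fetch-happen').defaults({ cachePath: '/tmp/example-cache' })
//   async function demo () {
//     // 'reload' skips any cached entry, fetches from the network, then stores the response
//     await fetch('https://registry.example.com/pkg', { cache: 'reload' })
//     // 'only-if-cached' never touches the network and rejects with ENOTCACHED on a cache miss
//     await fetch('https://registry.example.com/pkg', { cache: 'only-if-cached' })
//   }
//   demo().catch(console.error)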
+ if (options.cache === 'reload') + return + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if (this.request.method !== 'GET' || ![200, 301, 308].includes(this.response.status) || !this.policy.storable()) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = this.response.headers.get('content-length') + const fitsInMemory = !!size && Number(size) < MAX_MEM_SIZE + const shouldBuffer = this.options.memoize !== false && fitsInMemory + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + memoize: fitsInMemory && this.options.memoize, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }) + + body = new MinipassPipeline(new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + + let abortStream, onResume + if (shouldBuffer) { + // if the result fits in memory, use a collect stream to gather + // the response and write it to cacache while also passing it through + // to the user + onResume = () => { + const collector = new MinipassCollect.PassThrough() + abortStream = collector + collector.on('collect', (data) => { + // TODO if the cache write fails, log a warning but return the response anyway + cacache.put(this.options.cachePath, this.key, data, cacheOpts).then(cacheWriteResolve, cacheWriteReject) + }) + body.unshift(collector) + body.unshift(this.response.body) + } + } else { + // if it does not fit in memory, create a tee stream and use + // that to pipe to both the cache and the user simultaneously + onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + 
abortStream = cacheStream + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + this.response.body.on('error', (err) => { + // the abortStream will either be a MinipassCollect if we buffer + // or a cacache write stream, either way be sure to listen for + // errors from the actual response and avoid writing data that we + // know to be invalid to the cache + abortStream.destroy(err) + }) + } else + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + const size = Number(this.response.headers.get('content-length')) + const fitsInMemory = !!size && size < MAX_MEM_SIZE + const shouldBuffer = this.options.memoize !== false && fitsInMemory + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const removeOnResume = () => body.removeListener('resume', onResume) + let onResume + if (shouldBuffer) { + onResume = async () => { + removeOnResume() + try { + const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + body.end(content) + } catch (err) { + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) + body.emit('error', err) + } + } + } else { + onResume = () => { + const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) + body.emit('error', err) + cacheStream.resume() + }) + cacheStream.pipe(body) + } + } + + 
body.once('resume', onResume) + body.once('end', removeOnResume) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers: { + ...this.policy.responseHeaders(), + }, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) + return this.respond(request.method, options, 'stale') + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if (!hasOwnProperty(metadata.resHeaders, name) && hasOwnProperty(this.entry.metadata.resHeaders, name)) + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 00000000000000..31e97c4b033c09 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,10 @@ +class NotCachedError extends Error { + constructor (url) { + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git 
a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 00000000000000..cca93d9b4eb5d3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,45 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') + throw new NotCachedError(request.url) + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const entry = new CacheEntry({ request, response, options }) + return entry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') + return entry.revalidate(request, options) + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) + return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit') + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) + return + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 00000000000000..f7684d562b7fae --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 00000000000000..e0959f64ddf9df --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// HACK: negotiator lazy loads several of its own modules +// as a micro optimization. 
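// A rough sketch of what the cacheKey helper in lib/cache/key.js above produces: credentials
// and fragments are dropped while the query string is kept, so two requests that differ only
// by auth or #fragment map to the same cache entry. The URLs are example data.
const { URL, format } = require('url')

const formatOptions = { auth: false, fragment: false, search: true, unicode: false }
const cacheKey = (url) =>
  `make-fetch-happen:request-cache:${format(new URL(url), formatOptions)}`

console.log(cacheKey('https://user:pass@registry.npmjs.org/pkg?version=1#readme'))
// -> make-fetch-happen:request-cache:https://registry.npmjs.org/pkg?version=1
console.log(cacheKey('https://registry.npmjs.org/pkg?version=1'))
// -> the same key as above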
we need to be sure that they're +// in memory as soon as possible at startup so that we do +// not try to lazy load them after the directory has been +// retired during a self update of the npm CLI, we do this +// by calling all of the methods that trigger a lazy load +// on a fake instance. +const preloadNegotiator = new Negotiator({ headers: {} }) +preloadNegotiator.charsets() +preloadNegotiator.encodings() +preloadNegotiator.languages() +preloadNegotiator.mediaTypes() + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) + return false + + // user explicitly asked not to cache + if (options.cache === 'no-store') + return false + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) + return false + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) + return false + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) + return false + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) + return false + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) + return false + + if (this.options.integrity) + return ssri.parse(this.options.integrity).match(this.entry.integrity) + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () 
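// A rough sketch of the storability check that CachePolicy.storable above delegates to
// http-cache-semantics, after the cheap early-outs (no cachePath, cache: 'no-store',
// non-GET/HEAD method). The request/response literals are example data and the expected
// results assume http-cache-semantics' documented defaults.
const CacheSemantics = require('http-cache-semantics')

const policyOptions = { shared: false, ignoreCargoCult: true }

const canStore = (request, response = { status: 200, headers: {} }) => {
  if (!['GET', 'HEAD'].includes(request.method)) {
    return false
  }
  return new CacheSemantics(request, response, policyOptions).storable()
}

console.log(canStore({ method: 'GET', url: '/x', headers: {} }))   // true
console.log(canStore({ method: 'POST', url: '/x', headers: {} }))  // false
console.log(canStore({ method: 'GET', url: '/x', headers: {} },
  { status: 200, headers: { 'cache-control': 'no-store' } }))      // false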
{ + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 00000000000000..dfded79295da1d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,100 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) + return false + + if (options.redirect === 'manual') + return false + + if (options.redirect === 'error') + throw new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect', { code: 'ENOREDIRECT' }) + + if (!response.headers.has('location')) + throw new FetchError(`redirect location header missing for: ${request.url}`, 'no-location', { code: 'EINVALIDREDIRECT' }) + + if (request.counter >= request.follow) + throw new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect', { code: 'EMAXREDIRECT' }) + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + // Copyright (c) 2010-2012 Mikeal Rogers + // Licensed under the Apache License, Version 2.0 (the "License"); + // you may not use this file except in compliance with the License. + // You may obtain a copy of the License at + // http://www.apache.org/licenses/LICENSE-2.0 + // Unless required by applicable law or agreed to in writing, + // software distributed under the License is distributed on an "AS + // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + // express or implied. 
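// A rough sketch of why needsRevalidation above rewrites the method to GET: only GET
// responses are written to the cache, but a cached GET may satisfy a later HEAD for the
// same URL, so freshness is checked as if the incoming request were a GET. The literals are
// example data and the results assume http-cache-semantics' behaviour for a fresh max-age
// response.
const CacheSemantics = require('http-cache-semantics')

const cachedReq = { method: 'GET', url: '/pkg', headers: { host: 'registry.npmjs.org' } }
const cachedRes = { status: 200, headers: { 'cache-control': 'max-age=300' } }
const policy = new CacheSemantics(cachedReq, cachedRes, { shared: false })

const headReq = { method: 'HEAD', url: '/pkg', headers: { host: 'registry.npmjs.org' } }
console.log(policy.satisfiesWithoutRevalidation(headReq))                       // false: method differs
console.log(policy.satisfiesWithoutRevalidation({ ...headReq, method: 'GET' })) // true while still fresh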
See the License for the specific language + // governing permissions and limitations under the License. + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) + request.headers.delete('authorization') + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if (response.status === 303 || (request.method === 'POST' && [301, 302].includes(response.status))) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) + await cache.invalidate(request, options) + + if (!canFollowRedirect(request, response, options)) + return response + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 00000000000000..6028bc0725129a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,40 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return makeFetchHappen(finalUrl, finalOptions) + } + + defaultedFetch.defaults = makeFetchHappen.defaults + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 00000000000000..f6138e6e1d13a6 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,44 @@ +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = 
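// A rough usage sketch of the defaults() helper defined in lib/index.js above: it returns a
// fetch function whose options and headers are shallow-merged over the supplied defaults.
// The cache directory and header values are placeholders.
const makeFetchHappen = require('make-fetch-happen')

const cachingFetch = makeFetchHappen.defaults({
  cachePath: '/tmp/example-cache',
  headers: { 'user-agent': 'example-client/1.0.0' },
})

// per-call options win over the defaults; headers are merged key by key
cachingFetch('https://registry.npmjs.org/-/ping', { headers: { accept: 'application/json' } })
  .then(res => console.log(res.status, res.headers.get('x-local-cache-status')))
  .catch(console.error)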
(opts) => { + const {strictSSL, ...options} = { ...opts } + options.method = options.method ? options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) + options.retry = { retries: 0 } + else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) + options.retry = { retries } + else + options.retry = { retries: 0 } + } else if (typeof options.retry === 'number') + options.retry = { retries: options.retry } + else + options.retry = { retries: 0, ...options.retry } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) + options.cache = 'no-store' + } + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) + options.cachePath = options.cacheManager + + return options +} + +module.exports = configureOptions diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 00000000000000..7e4ed24edb5304 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,102 @@ +const Minipass = require('minipass') +const MinipassPipeline = require('minipass-pipeline') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') + +const getAgent = require('./agent.js') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) + request.headers.set('connection', agent ? 
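// A rough sketch of what configureOptions above does with a few representative option bags;
// the deep require path follows the new lib/ layout and all input values are examples.
const configureOptions = require('make-fetch-happen/lib/options.js')

console.log(configureOptions({ retry: '3', strictSSL: false }))
// -> { retry: { retries: 3 }, method: 'GET', rejectUnauthorized: false, cache: 'default' }

console.log(configureOptions({ method: 'get', headers: { 'If-None-Match': '"abc"' } }).cache)
// -> 'no-store' (a conditional request header disables the default caching mode)

console.log(configureOptions({ cacheManager: '/tmp/old-cache' }).cachePath)
// -> '/tmp/old-cache' (the deprecated cacheManager option is copied to cachePath)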
'keep-alive' : 'close') + + if (!request.headers.has('user-agent')) + request.headers.set('user-agent', USER_AGENT) + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ integrity: _opts.integrity }) + res = new fetch.Response(new MinipassPipeline(res.body, integrityStream), res) + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') + options.onRetry(res) + + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) + throw err + + if (typeof options.onRetry === 'function') + options.onRetry(err) + + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') + return err + + throw err + }) +} + +module.exports = remoteFetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json new file mode 100644 index 00000000000000..dae7b37da40691 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json @@ -0,0 +1,76 @@ +{ + "name": "make-fetch-happen", + "version": "9.1.0", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "lib" + ], + "scripts": { + "preversion": "npm t", + "postversion": "npm publish", + "prepublishOnly": "git push --follow-tags", + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "npm run eslint -- lib test", + "lintfix": "npm run lint -- --fix" + }, + "repository": "https://github.com/npm/make-fetch-happen", + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech", + "twitter": "maybekatz" + }, + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.1.3", + "cacache": "^15.2.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^6.0.0", + "minipass": "^3.1.3", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^1.3.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", + "promise-retry": "^2.0.1", + 
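// A rough sketch of the promise-retry pattern used by remoteFetch above, reduced to the
// retry decision only. The doRequest callback and error codes are placeholders; the real
// code additionally refuses to retry POSTs and streaming bodies, and wraps the response in
// an ssri integrity stream when an integrity value was requested.
const promiseRetry = require('promise-retry')

const RETRYABLE_CODES = ['ECONNRESET', 'ECONNREFUSED', 'EADDRINUSE', 'ETIMEDOUT', 'ERR_SOCKET_TIMEOUT']

const fetchWithRetry = (doRequest, retryOpts = { retries: 2 }) =>
  promiseRetry(async (retry, attemptNum) => {
    try {
      const res = await doRequest(attemptNum)
      // retry on throttling / server errors, succeed otherwise
      if ([408, 420, 429].includes(res.status) || res.status >= 500) {
        return retry(res)
      }
      return res
    } catch (err) {
      if (RETRYABLE_CODES.includes(err.code)) {
        return retry(err)
      }
      throw err
    }
  }, retryOpts)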
"socks-proxy-agent": "^6.0.0", + "ssri": "^8.0.0" + }, + "devDependencies": { + "eslint": "^7.26.0", + "eslint-plugin-import": "^2.23.2", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "mkdirp": "^1.0.4", + "nock": "^13.0.11", + "npmlog": "^5.0.0", + "require-inject": "^1.4.2", + "rimraf": "^3.0.2", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.0", + "tap": "^15.0.9" + }, + "engines": { + "node": ">= 10" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true + } +} diff --git a/deps/npm/node_modules/npm-profile/LICENSE b/deps/npm/node_modules/npm-profile/LICENSE deleted file mode 100644 index 7953647e7760b8..00000000000000 --- a/deps/npm/node_modules/npm-profile/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/npm-profile/LICENSE.md b/deps/npm/node_modules/npm-profile/LICENSE.md new file mode 100644 index 00000000000000..5fc208ff122e08 --- /dev/null +++ b/deps/npm/node_modules/npm-profile/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/npm-profile/index.js b/deps/npm/node_modules/npm-profile/lib/index.js similarity index 92% rename from deps/npm/node_modules/npm-profile/index.js rename to deps/npm/node_modules/npm-profile/lib/index.js index 4f2a2ae7cc2ff2..aa322e37f4824a 100644 --- a/deps/npm/node_modules/npm-profile/index.js +++ b/deps/npm/node_modules/npm-profile/lib/index.js @@ -1,7 +1,7 @@ 'use strict' const fetch = require('npm-registry-fetch') -const { HttpErrorBase } = require('npm-registry-fetch/errors.js') +const { HttpErrorBase } = require('npm-registry-fetch/lib/errors') const os = require('os') const { URL } = require('url') @@ -57,7 +57,7 @@ const webAuth = (opener, opts, body) => { return fetch(target, { ...opts, method: 'POST', - body + body, }).then(res => { return Promise.all([res, res.json()]) }).then(([res, content]) => { @@ -76,7 +76,7 @@ const webAuth = (opener, opts, body) => { if ((er.statusCode >= 400 && er.statusCode <= 499) || er.statusCode === 500) { throw new WebLoginNotSupported('POST', { status: er.statusCode, - headers: { raw: () => er.headers } + headers: { raw: () => er.headers }, }, er.body) } else { throw er @@ -115,11 +115,11 @@ const adduserCouch = (username, email, password, opts = {}) => { email: email, type: 'user', roles: [], - date: new Date().toISOString() + date: new Date().toISOString(), } const logObj = { ...body, - password: 'XXXXX' + password: 'XXXXX', } process.emit('log', 'verbose', 'adduser', 'before first PUT', logObj) @@ -127,7 +127,7 @@ const adduserCouch = (username, email, password, opts = {}) => { return fetch.json(target, { ...opts, method: 'PUT', - body + body, }).then(result => { result.username = username return result @@ -141,11 +141,11 @@ const loginCouch = (username, password, opts = {}) => { password: password, type: 'user', roles: [], - date: new Date().toISOString() + date: new Date().toISOString(), } const logObj = { ...body, - password: 'XXXXX' + password: 'XXXXX', } process.emit('log', 'verbose', 'login', 'before first PUT', logObj) @@ -153,16 +153,18 @@ const loginCouch = (username, password, opts = {}) => { return fetch.json(target, { ...opts, method: 'PUT', - body + body, }).catch(err => { if (err.code === 'E400') { err.message = `There is no user with the username "${username}".` throw err } - if (err.code !== 'E409') throw err + if (err.code !== 'E409') { + throw err + } return fetch.json(target, { ...opts, - query: { write: true } + query: { write: true }, }).then(result => { Object.keys(result).forEach(k => { if (!body[k] || k === 'roles') { @@ -177,8 +179,8 @@ const loginCouch = (username, password, opts = {}) => { forceAuth: { username, password: Buffer.from(password, 'utf8').toString('base64'), - otp - } + otp, + }, }) }) }).then(result => { @@ -192,12 +194,14 @@ const get = (opts = {}) => fetch.json('/-/npm/v1/user', opts) const set = (profile, opts = {}) => { Object.keys(profile).forEach(key => { // profile keys can't be empty strings, but they CAN be null - if (profile[key] === '') profile[key] = null + if (profile[key] === '') { + profile[key] = null + } }) return fetch.json('/-/npm/v1/user', { ...opts, method: 'POST', - body: profile + body: profile, }) } @@ -220,7 +224,7 @@ const removeToken = (tokenKey, opts = {}) => { return fetch(target, { ...opts, method: 'DELETE', - ignoreBody: true + ignoreBody: true, }).then(() => null) } @@ -231,8 +235,8 @@ const createToken = (password, readonly, cidrs, opts = {}) => { body: { password: password, readonly: readonly, - cidr_whitelist: cidrs - } + 
cidr_whitelist: cidrs, + }, }) } @@ -267,5 +271,5 @@ module.exports = { set, listTokens, removeToken, - createToken + createToken, } diff --git a/deps/npm/node_modules/npm-profile/package.json b/deps/npm/node_modules/npm-profile/package.json index 43cc7c921bb049..8745c2559f33fa 100644 --- a/deps/npm/node_modules/npm-profile/package.json +++ b/deps/npm/node_modules/npm-profile/package.json @@ -1,39 +1,45 @@ { "name": "npm-profile", - "version": "5.0.4", + "version": "6.0.0", "description": "Library for updating an npmjs.com profile", "keywords": [], - "author": "Rebecca Turner (http://re-becca.org/)", + "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "npm-registry-fetch": "^11.0.0" + "npm-registry-fetch": "^12.0.0" }, - "main": "index.js", + "main": "./lib/index.js", "repository": { "type": "git", "url": "git+https://github.com/npm/npm-profile.git" }, "files": [ - "index.js" + "bin", + "lib" ], "devDependencies": { - "nock": "^12.0.1", - "require-inject": "^1.4.4", - "standard": "^14.3.1", - "tap": "^14.10.6" + "@npmcli/template-oss": "^2.5.1", + "nock": "^13.2.1", + "tap": "^15.1.5" }, "scripts": { "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", - "posttest": "standard index.js", + "posttest": "npm run lint", "test": "tap", - "snap": "tap" + "snap": "tap", + "lint": "eslint '**/*.js'", + "postlint": "npm-template-check", + "lintfix": "npm run lint -- --fix" }, "tap": { "check-coverage": true }, "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "version": "2.5.1" } } diff --git a/deps/npm/node_modules/npm-registry-fetch/LICENSE.md b/deps/npm/node_modules/npm-registry-fetch/LICENSE.md index 8d28acf866d932..5fc208ff122e08 100644 --- a/deps/npm/node_modules/npm-registry-fetch/LICENSE.md +++ b/deps/npm/node_modules/npm-registry-fetch/LICENSE.md @@ -1,16 +1,20 @@ + + ISC License -Copyright (c) npm, Inc. +Copyright npm, Inc. -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/npm-registry-fetch/auth.js b/deps/npm/node_modules/npm-registry-fetch/lib/auth.js similarity index 94% rename from deps/npm/node_modules/npm-registry-fetch/auth.js rename to deps/npm/node_modules/npm-registry-fetch/lib/auth.js index 01a4436a5d2a88..e6b50b12eb2074 100644 --- a/deps/npm/node_modules/npm-registry-fetch/auth.js +++ b/deps/npm/node_modules/npm-registry-fetch/lib/auth.js @@ -12,8 +12,9 @@ const regKeyFromURI = (uri, opts) => { let regKey = `//${parsed.host}${parsed.pathname}` while (regKey.length > '//'.length) { // got some auth for this URI - if (hasAuth(regKey, opts)) + if (hasAuth(regKey, opts)) { return regKey + } // can be either //host/some/path/:_auth or //host/some/path:_auth // walk up by removing EITHER what's after the slash OR the slash itself @@ -44,8 +45,9 @@ const getRegistry = opts => { const getAuth = (uri, opts = {}) => { const { forceAuth } = opts - if (!uri) + if (!uri) { throw new Error('URI is required') + } const regKey = regKeyFromURI(uri, forceAuth || opts) // we are only allowed to use what's in forceAuth if specified @@ -62,9 +64,9 @@ const getAuth = (uri, opts = {}) => { // no auth for this URI, but might have it for the registry if (!regKey) { const registry = getRegistry(opts) - if (registry && uri !== registry && sameHost(uri, registry)) + if (registry && uri !== registry && sameHost(uri, registry)) { return getAuth(registry, opts) - else if (registry !== opts.registry) { + } else if (registry !== opts.registry) { // If making a tarball request to a different base URI than the // registry where we logged in, but the same auth SHOULD be sent // to that artifact host, then we track where it was coming in from, @@ -96,11 +98,11 @@ class Auth { this.token = null this.auth = null this.isBasicAuth = false - if (token) + if (token) { this.token = token - else if (auth) + } else if (auth) { this.auth = auth - else if (username && password) { + } else if (username && password) { const p = Buffer.from(password, 'base64').toString('utf8') this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64') this.isBasicAuth = true diff --git a/deps/npm/node_modules/npm-registry-fetch/check-response.js b/deps/npm/node_modules/npm-registry-fetch/lib/check-response.js similarity index 89% rename from deps/npm/node_modules/npm-registry-fetch/check-response.js rename to deps/npm/node_modules/npm-registry-fetch/lib/check-response.js index 8bd85661ee8cae..26043a96de854e 100644 --- a/deps/npm/node_modules/npm-registry-fetch/check-response.js +++ b/deps/npm/node_modules/npm-registry-fetch/lib/check-response.js @@ -4,11 +4,14 @@ const errors = require('./errors.js') const { Response } = require('minipass-fetch') const defaultOpts = require('./default-opts.js') +/* eslint-disable-next-line max-len */ +const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry' const checkResponse = async ({ method, uri, res, registry, startTime, auth, opts }) => { opts = { ...defaultOpts, ...opts } - if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) + if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) { opts.log.notice('', res.headers.get('npm-notice')) + } if (res.status >= 400) { logRequest(method, res, startTime, opts) @@ -21,7 +24,7 @@ const checkResponse = URI: ${uri} Scoped Registry Key: ${auth.scopeAuthKey} -More info here: https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry`) +More info here: ${moreInfoUrl}`) } return 
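// A rough sketch of the registry-key walk performed by regKeyFromURI above: auth options
// are keyed by a '//host/path' prefix (e.g. '//registry.npmjs.org/:_authToken'), and the
// lookup strips path segments until it finds a prefix that has credentials. The hasAuth
// helper and the walk-up step here are simplified stand-ins for the real implementation;
// the URL and token are example data.
const hasAuth = (regKey, opts) =>
  Boolean(opts[`${regKey}:_authToken`] || opts[`${regKey}:_auth`] ||
    (opts[`${regKey}:username`] && opts[`${regKey}:_password`]))

const regKeyFromURI = (uri, opts) => {
  const parsed = new URL(uri)
  let regKey = `//${parsed.host}${parsed.pathname}`
  while (regKey.length > '//'.length) {
    if (hasAuth(regKey, opts)) {
      return regKey
    }
    // walk up: drop either the trailing path segment or the trailing slash
    regKey = regKey.endsWith('/') ? regKey.slice(0, -1) : regKey.replace(/\/[^/]*$/, '/')
  }
  return null
}

console.log(regKeyFromURI('https://registry.npmjs.org/@scope%2fpkg/-/pkg-1.0.0.tgz', {
  '//registry.npmjs.org/:_authToken': 'npm_xxxxxxxx', // placeholder token
}))
// -> '//registry.npmjs.org/'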
checkErrors(method, res, startTime, opts) } else { @@ -46,8 +49,9 @@ function logRequest (method, res, startTime, opts) { try { const { URL } = require('url') const url = new URL(res.url) - if (url.password) + if (url.password) { url.password = '***' + } urlStr = url.toString() } catch (er) { @@ -85,7 +89,11 @@ function checkErrors (method, res, startTime, opts) { method, res, parsed, opts.spec ) } - } else if (res.status === 401 && body != null && /one-time pass/.test(body.toString('utf8'))) { + } else if ( + res.status === 401 && + body != null && + /one-time pass/.test(body.toString('utf8')) + ) { // Heuristic for malformed OTP responses that don't include the // www-authenticate header. throw new errors.HttpErrorAuthOTP( diff --git a/deps/npm/node_modules/npm-registry-fetch/default-opts.js b/deps/npm/node_modules/npm-registry-fetch/lib/default-opts.js similarity index 90% rename from deps/npm/node_modules/npm-registry-fetch/default-opts.js rename to deps/npm/node_modules/npm-registry-fetch/lib/default-opts.js index 9ca3f97d0352e9..e8e8221da4a589 100644 --- a/deps/npm/node_modules/npm-registry-fetch/default-opts.js +++ b/deps/npm/node_modules/npm-registry-fetch/lib/default-opts.js @@ -1,4 +1,4 @@ -const pkg = require('./package.json') +const pkg = require('../package.json') module.exports = { log: require('./silentlog.js'), maxSockets: 12, diff --git a/deps/npm/node_modules/npm-registry-fetch/errors.js b/deps/npm/node_modules/npm-registry-fetch/lib/errors.js similarity index 97% rename from deps/npm/node_modules/npm-registry-fetch/errors.js rename to deps/npm/node_modules/npm-registry-fetch/lib/errors.js index e65e5fbd80ddaf..0efc923e3e900f 100644 --- a/deps/npm/node_modules/npm-registry-fetch/errors.js +++ b/deps/npm/node_modules/npm-registry-fetch/lib/errors.js @@ -8,10 +8,11 @@ function packageName (href) { if (!basePath.match(/^-/)) { basePath = basePath.split('/') var index = basePath.indexOf('_rewrite') - if (index === -1) + if (index === -1) { index = basePath.length - 1 - else + } else { index++ + } return decodeURIComponent(basePath[index]) } } catch (_) { diff --git a/deps/npm/node_modules/npm-registry-fetch/index.js b/deps/npm/node_modules/npm-registry-fetch/lib/index.js similarity index 87% rename from deps/npm/node_modules/npm-registry-fetch/index.js rename to deps/npm/node_modules/npm-registry-fetch/lib/index.js index 35fab75bcade98..19c921403e5cd0 100644 --- a/deps/npm/node_modules/npm-registry-fetch/index.js +++ b/deps/npm/node_modules/npm-registry-fetch/lib/index.js @@ -60,11 +60,14 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { typeof body === 'object' && typeof body.then === 'function' - if (body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)) { + if ( + body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body) + ) { headers['content-type'] = headers['content-type'] || 'application/json' body = JSON.stringify(body) - } else if (body && !headers['content-type']) + } else if (body && !headers['content-type']) { headers['content-type'] = 'application/octet-stream' + } if (opts.gzip) { headers['content-encoding'] = 'gzip' @@ -73,8 +76,9 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { body.on('error', /* istanbul ignore next: unlikely and hard to test */ err => gz.emit('error', err)) body = body.pipe(gz) - } else if (!bodyIsPromise) + } else if (!bodyIsPromise) { body = new zlib.Gzip().end(body).concat() + } } const parsed = new url.URL(uri) @@ -84,8 +88,9 
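// A rough sketch of the credential masking done in logRequest above before a request URL is
// written to the log; the URL is example data.
const { URL } = require('url')

const maskUrl = (rawUrl) => {
  try {
    const url = new URL(rawUrl)
    if (url.password) {
      url.password = '***'
    }
    return url.toString()
  } catch (er) {
    // unparseable input: log it as-is rather than failing the log call
    return rawUrl
  }
}

console.log(maskUrl('https://user:hunter2@registry.example.com/pkg'))
// -> https://user:***@registry.example.com/pkg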
@@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { : opts.query Object.keys(q).forEach(key => { - if (q[key] !== undefined) + if (q[key] !== undefined) { parsed.searchParams.set(key, q[key]) + } }) uri = url.format(parsed) } @@ -105,7 +110,7 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { algorithms: opts.algorithms, body, cache: getCacheMode(opts), - cacheManager: opts.cache, + cachePath: opts.cache, ca: opts.ca, cert: opts.cert, headers, @@ -138,17 +143,24 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { if (typeof opts.otpPrompt === 'function') { return p.catch(async er => { if (er instanceof HttpErrorAuthOTP) { + let otp // if otp fails to complete, we fail with that failure - const otp = await opts.otpPrompt() - // if no otp provided, throw the original HTTP error - if (!otp) + try { + otp = await opts.otpPrompt() + } catch (_) { + // ignore this error + } + // if no otp provided, or otpPrompt errored, throw the original HTTP error + if (!otp) { throw er + } return regFetch(uri, { ...opts, otp }) } throw er }) - } else + } else { return p + } } return Promise.resolve(body).then(doFetch) @@ -178,11 +190,13 @@ function pickRegistry (spec, opts = {}) { let registry = spec.scope && opts[spec.scope.replace(/^@?/, '@') + ':registry'] - if (!registry && opts.scope) + if (!registry && opts.scope) { registry = opts[opts.scope.replace(/^@?/, '@') + ':registry'] + } - if (!registry) + if (!registry) { registry = opts.registry || defaultOpts.registry + } return registry } @@ -199,24 +213,29 @@ function getHeaders (uri, auth, opts) { 'user-agent': opts.userAgent, }, opts.headers || {}) - if (opts.projectScope) - headers['npm-scope'] = opts.projectScope + if (opts.scope) { + headers['npm-scope'] = opts.scope + } - if (opts.npmSession) + if (opts.npmSession) { headers['npm-session'] = opts.npmSession + } - if (opts.npmCommand) + if (opts.npmCommand) { headers['npm-command'] = opts.npmCommand + } // If a tarball is hosted on a different place than the manifest, only send // credentials on `alwaysAuth` - if (auth.token) + if (auth.token) { headers.authorization = `Bearer ${auth.token}` - else if (auth.auth) + } else if (auth.auth) { headers.authorization = `Basic ${auth.auth}` + } - if (opts.otp) + if (opts.otp) { headers['npm-otp'] = opts.otp + } return headers } diff --git a/deps/npm/node_modules/npm-registry-fetch/silentlog.js b/deps/npm/node_modules/npm-registry-fetch/lib/silentlog.js similarity index 100% rename from deps/npm/node_modules/npm-registry-fetch/silentlog.js rename to deps/npm/node_modules/npm-registry-fetch/lib/silentlog.js diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json index e4eaabaa5b09a6..ff4482b1fdc9e0 100644 --- a/deps/npm/node_modules/npm-registry-fetch/package.json +++ b/deps/npm/node_modules/npm-registry-fetch/package.json @@ -1,22 +1,25 @@ { "name": "npm-registry-fetch", - "version": "11.0.0", + "version": "12.0.1", "description": "Fetch-based http client for use with npm registry APIs", - "main": "index.js", + "main": "lib", "files": [ - "*.js" + "bin", + "lib" ], "scripts": { "eslint": "eslint", - "lint": "npm run npmclilint -- \"*.*js\" \"test/**/*.*js\"", + "lint": "eslint '**/*.js'", "lintfix": "npm run lint -- --fix", "prepublishOnly": "git push origin --follow-tags", "preversion": "npm test", "postversion": "npm publish", "test": "tap", - "posttest": "npm run lint --", + "posttest": "npm run lint", "npmclilint": "npmcli-lint", - 
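// A rough sketch of how pickRegistry above resolves the registry for a scoped package. The
// spec object is a simplified stand-in for what npm-package-arg returns, and the hard-coded
// default registry mirrors the module's own default; all values are examples.
const pickRegistry = (spec, opts = {}) => {
  let registry = spec.scope && opts[spec.scope.replace(/^@?/, '@') + ':registry']
  if (!registry && opts.scope) {
    registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
  }
  return registry || opts.registry || 'https://registry.npmjs.org/'
}

console.log(pickRegistry({ scope: '@myorg' }, {
  '@myorg:registry': 'https://npm.example.com/', // scoped registry wins
  registry: 'https://registry.npmjs.org/',
}))
// -> https://npm.example.com/

console.log(pickRegistry({ scope: null }, { registry: 'https://registry.npmjs.org/' }))
// -> https://registry.npmjs.org/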
"postsnap": "npm run lintfix --" + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "snap": "tap" }, "repository": "https://github.com/npm/npm-registry-fetch", "keywords": [ @@ -24,14 +27,10 @@ "registry", "fetch" ], - "author": { - "name": "Kat Marchán", - "email": "kzm@sykosomatic.org", - "twitter": "maybekatz" - }, + "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "make-fetch-happen": "^9.0.1", + "make-fetch-happen": "^10.0.0", "minipass": "^3.1.3", "minipass-fetch": "^1.3.0", "minipass-json-stream": "^1.0.1", @@ -39,7 +38,7 @@ "npm-package-arg": "^8.0.0" }, "devDependencies": { - "@npmcli/lint": "^1.0.1", + "@npmcli/template-oss": "^2.5.1", "cacache": "^15.0.0", "nock": "^13.1.0", "npmlog": "^4.1.2", @@ -52,6 +51,9 @@ "test-ignore": "test[\\\\/](util|cache)[\\\\/]" }, "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "version": "2.5.1" } } diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json index d0fe0a065b414f..ffbc067f68dee6 100644 --- a/deps/npm/node_modules/pacote/package.json +++ b/deps/npm/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "12.0.2", + "version": "12.0.3", "description": "JavaScript package downloader", "author": "Isaac Z. Schlueter (https://izs.me)", "bin": { @@ -46,7 +46,7 @@ "npm-package-arg": "^8.0.1", "npm-packlist": "^3.0.0", "npm-pick-manifest": "^6.0.0", - "npm-registry-fetch": "^11.0.0", + "npm-registry-fetch": "^12.0.0", "promise-retry": "^2.0.1", "read-package-json-fast": "^2.0.1", "rimraf": "^3.0.2", diff --git a/deps/npm/package.json b/deps/npm/package.json index 508f6158471235..2486a1aadf87ef 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "8.3.2", + "version": "8.4.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -55,7 +55,7 @@ }, "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^4.2.1", + "@npmcli/arborist": "^4.3.0", "@npmcli/ci-detect": "^1.4.0", "@npmcli/config": "^2.4.0", "@npmcli/map-workspaces": "^2.0.0", @@ -79,18 +79,18 @@ "init-package-json": "^2.0.5", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^2.3.1", - "libnpmaccess": "^5.0.0", + "libnpmaccess": "^5.0.1", "libnpmdiff": "^3.0.0", - "libnpmexec": "^3.0.2", + "libnpmexec": "^3.0.3", "libnpmfund": "^2.0.2", - "libnpmhook": "^7.0.0", - "libnpmorg": "^3.0.0", + "libnpmhook": "^7.0.1", + "libnpmorg": "^3.0.1", "libnpmpack": "^3.0.1", - "libnpmpublish": "^5.0.0", - "libnpmsearch": "^4.0.0", - "libnpmteam": "^3.0.0", + "libnpmpublish": "^5.0.1", + "libnpmsearch": "^4.0.1", + "libnpmteam": "^3.0.1", "libnpmversion": "^2.0.2", - "make-fetch-happen": "^9.1.0", + "make-fetch-happen": "^10.0.0", "minipass": "^3.1.6", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -102,12 +102,12 @@ "npm-install-checks": "^4.0.0", "npm-package-arg": "^8.1.5", "npm-pick-manifest": "^6.1.1", - "npm-profile": "^5.0.3", - "npm-registry-fetch": "^11.0.0", + "npm-profile": "^6.0.0", + "npm-registry-fetch": "^12.0.1", "npm-user-validate": "^1.0.1", "npmlog": "^6.0.0", "opener": "^1.5.2", - "pacote": "^12.0.2", + "pacote": "^12.0.3", "parse-conflict-json": "^2.0.1", "proc-log": "^1.0.0", "qrcode-terminal": "^0.12.0",