From b20c2422369083e49b6bcab1577d30d7799fd38c Mon Sep 17 00:00:00 2001
From: Gar
Date: Mon, 19 Jun 2023 07:37:21 -0700
Subject: [PATCH 01/16] deps: cacache@17.1.3

---
 node_modules/cacache/lib/util/glob.js | 3 ++-
 node_modules/cacache/package.json | 2 +-
 package-lock.json | 8 ++++----
 package.json | 2 +-
 4 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/node_modules/cacache/lib/util/glob.js b/node_modules/cacache/lib/util/glob.js
index 3132a4da65e63..8500c1c16a429 100644
--- a/node_modules/cacache/lib/util/glob.js
+++ b/node_modules/cacache/lib/util/glob.js
@@ -1,6 +1,7 @@
 'use strict'
 
 const { glob } = require('glob')
+const path = require('path')
 
-const globify = (pattern) => pattern.split('\\').join('/')
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
 module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json
index db17e3a41bc5e..a6f6f9bdfc465 100644
--- a/node_modules/cacache/package.json
+++ b/node_modules/cacache/package.json
@@ -1,6 +1,6 @@
 {
   "name": "cacache",
-  "version": "17.1.2",
+  "version": "17.1.3",
   "cache-version": {
     "content": "2",
     "index": "5"
diff --git a/package-lock.json b/package-lock.json
index 629f05d6ab000..d9c97b0f7d1ef 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -92,7 +92,7 @@
         "@npmcli/run-script": "^6.0.2",
         "abbrev": "^2.0.0",
         "archy": "~1.0.0",
-        "cacache": "^17.1.2",
+        "cacache": "^17.1.3",
         "chalk": "^5.2.0",
         "ci-info": "^3.8.0",
         "cli-columns": "^4.0.0",
@@ -3438,9 +3438,9 @@
       }
     },
     "node_modules/cacache": {
-      "version": "17.1.2",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.2.tgz",
-      "integrity": "sha512-VcRDUtZd9r7yfGDpdm3dBDBSQbLd19IqWs9q1tuB9g6kmxYLwIjfLngRKMCfDHxReuf0SBclRuYn66Xds7jzUQ==",
+      "version": "17.1.3",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.3.tgz",
+      "integrity": "sha512-jAdjGxmPxZh0IipMdR7fK/4sDSrHMLUV0+GvVUsjwyGNKHsh79kW/otg+GkbXwl6Uzvy9wsvHOX4nUoWldeZMg==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/fs": "^3.1.0",
diff --git a/package.json b/package.json
index cca9bde90c4da..b256a78c6c4a9 100644
--- a/package.json
+++ b/package.json
@@ -59,7 +59,7 @@
     "@npmcli/run-script": "^6.0.2",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
-    "cacache": "^17.1.2",
+    "cacache": "^17.1.3",
     "chalk": "^5.2.0",
     "ci-info": "^3.8.0",
     "cli-columns": "^4.0.0",

From 61d7ca341b640ffe02e1143ae73f39874dd923ca Mon Sep 17 00:00:00 2001
From: Gar
Date: Mon, 19 Jun 2023 07:39:17 -0700
Subject: [PATCH 02/16] deps: glob@10.2.7 minimatch@9.0.1

---
 node_modules/glob/dist/cjs/package.json | 99 +-----------------
 node_modules/glob/dist/mjs/package.json | 2 +-
 node_modules/glob/package.json | 13 ++--
 node_modules/minimatch/dist/cjs/index.js | 44 +++++------
 node_modules/minimatch/dist/mjs/index.js | 44 +++++------
 node_modules/minimatch/package.json | 2 +-
 package-lock.json | 18 ++---
 package.json | 2 +-
 8 files changed, 60 insertions(+), 164 deletions(-)

diff --git a/node_modules/glob/dist/cjs/package.json b/node_modules/glob/dist/cjs/package.json
index 8762de67dc4d5..44b67c307f1c8 100644
--- a/node_modules/glob/dist/cjs/package.json
+++ b/node_modules/glob/dist/cjs/package.json
@@ -1,99 +1,4 @@
 {
-  "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "name": "glob", - "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.4", - "bin": "./dist/cjs/src/bin.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-glob.git" - }, - "main": "./dist/cjs/src/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/mjs/index.d.ts", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/src/index.d.ts", - "default": "./dist/cjs/src/index.js" - } - } - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . --loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", - "prepublish": "npm run benchclean", - "profclean": "rm -f v8.log profile.txt", - "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", - "prebench": "npm run prepare", - "bench": "bash benchmark.sh", - "preprof": "npm run prepare", - "prof": "bash prof.sh", - "benchclean": "node benchclean.js" - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.0.3", - "minimatch": "^9.0.0", - "minipass": "^5.0.0 || ^6.0.0", - "path-scurry": "^1.7.0" - }, - "devDependencies": { - "@types/node": "^18.11.18", - "@types/tap": "^15.0.7", - "c8": "^7.12.0", - "memfs": "^3.4.13", - "mkdirp": "^2.1.4", - "prettier": "^2.8.3", - "rimraf": "^4.1.3", - "tap": "^16.3.4", - "ts-node": "^10.9.1", - "typedoc": "^0.23.24", - "typescript": "^4.9.4" - }, - "tap": { - "before": "test/00-setup.ts", - "coverage": false, - "node-arg": [ - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "license": "ISC", - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - } + "version": "10.2.7", + "type": "commonjs" } diff --git a/node_modules/glob/dist/mjs/package.json b/node_modules/glob/dist/mjs/package.json index e066bfabfb543..ac4c42f81fbd8 100644 --- a/node_modules/glob/dist/mjs/package.json +++ b/node_modules/glob/dist/mjs/package.json @@ -1,4 +1,4 @@ { - "version": "10.2.3", + "version": "10.2.7", "type": "module" } diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json index e11e8e3302579..ba9732c0f6de5 100644 --- a/node_modules/glob/package.json +++ b/node_modules/glob/package.json @@ -1,8 +1,8 @@ { - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "author": "Isaac Z. 
Schlueter (https://blog.izs.me/)", "name": "glob", "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.4", + "version": "10.2.7", "bin": "./dist/cjs/src/bin.js", "repository": { "type": "git", @@ -31,8 +31,7 @@ "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", "test": "c8 tap", @@ -62,12 +61,12 @@ "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", - "minimatch": "^9.0.0", - "minipass": "^5.0.0 || ^6.0.0", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2", "path-scurry": "^1.7.0" }, "devDependencies": { - "@types/node": "^18.11.18", + "@types/node": "^20.2.1", "@types/tap": "^15.0.7", "c8": "^7.12.0", "memfs": "^3.4.13", diff --git a/node_modules/minimatch/dist/cjs/index.js b/node_modules/minimatch/dist/cjs/index.js index 3cbc67f892f12..d70e681fef5d7 100644 --- a/node_modules/minimatch/dist/cjs/index.js +++ b/node_modules/minimatch/dist/cjs/index.js @@ -608,39 +608,35 @@ class Minimatch { // the parts match. matchOne(file, pattern, partial = false) { const options = this.options; - // a UNC pattern like //?/c:/* can match a path like c:/x - // and vice versa + // UNC paths like //?/X:/... can match X:/... and vice versa + // Drive letters in absolute drive or unc paths are always compared + // case-insensitively. if (this.isWindows) { - const fileUNC = file[0] === '' && + const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]); + const fileUNC = !fileDrive && + file[0] === '' && file[1] === '' && file[2] === '?' && - typeof file[3] === 'string' && /^[a-z]:$/i.test(file[3]); - const patternUNC = pattern[0] === '' && + const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]); + const patternUNC = !patternDrive && + pattern[0] === '' && pattern[1] === '' && pattern[2] === '?' && typeof pattern[3] === 'string' && /^[a-z]:$/i.test(pattern[3]); - if (fileUNC && patternUNC) { - const fd = file[3]; - const pd = pattern[3]; + const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined; + const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined; + if (typeof fdi === 'number' && typeof pdi === 'number') { + const [fd, pd] = [file[fdi], pattern[pdi]]; if (fd.toLowerCase() === pd.toLowerCase()) { - file[3] = pd; - } - } - else if (patternUNC && typeof file[0] === 'string') { - const pd = pattern[3]; - const fd = file[0]; - if (pd.toLowerCase() === fd.toLowerCase()) { - pattern[3] = fd; - pattern = pattern.slice(3); - } - } - else if (fileUNC && typeof pattern[0] === 'string') { - const fd = file[3]; - if (fd.toLowerCase() === pattern[0].toLowerCase()) { - pattern[0] = fd; - file = file.slice(3); + pattern[pdi] = fd; + if (pdi > fdi) { + pattern = pattern.slice(pdi); + } + else if (fdi > pdi) { + file = file.slice(fdi); + } } } } diff --git a/node_modules/minimatch/dist/mjs/index.js b/node_modules/minimatch/dist/mjs/index.js index 0d5e956be8818..831b6a67f63fb 100644 --- a/node_modules/minimatch/dist/mjs/index.js +++ b/node_modules/minimatch/dist/mjs/index.js @@ -596,39 +596,35 @@ export class Minimatch { // the parts match. 
matchOne(file, pattern, partial = false) { const options = this.options; - // a UNC pattern like //?/c:/* can match a path like c:/x - // and vice versa + // UNC paths like //?/X:/... can match X:/... and vice versa + // Drive letters in absolute drive or unc paths are always compared + // case-insensitively. if (this.isWindows) { - const fileUNC = file[0] === '' && + const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]); + const fileUNC = !fileDrive && + file[0] === '' && file[1] === '' && file[2] === '?' && - typeof file[3] === 'string' && /^[a-z]:$/i.test(file[3]); - const patternUNC = pattern[0] === '' && + const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]); + const patternUNC = !patternDrive && + pattern[0] === '' && pattern[1] === '' && pattern[2] === '?' && typeof pattern[3] === 'string' && /^[a-z]:$/i.test(pattern[3]); - if (fileUNC && patternUNC) { - const fd = file[3]; - const pd = pattern[3]; + const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined; + const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined; + if (typeof fdi === 'number' && typeof pdi === 'number') { + const [fd, pd] = [file[fdi], pattern[pdi]]; if (fd.toLowerCase() === pd.toLowerCase()) { - file[3] = pd; - } - } - else if (patternUNC && typeof file[0] === 'string') { - const pd = pattern[3]; - const fd = file[0]; - if (pd.toLowerCase() === fd.toLowerCase()) { - pattern[3] = fd; - pattern = pattern.slice(3); - } - } - else if (fileUNC && typeof pattern[0] === 'string') { - const fd = file[3]; - if (fd.toLowerCase() === pattern[0].toLowerCase()) { - pattern[0] = fd; - file = file.slice(3); + pattern[pdi] = fd; + if (pdi > fdi) { + pattern = pattern.slice(pdi); + } + else if (fdi > pdi) { + file = file.slice(fdi); + } } } } diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json index 06d796a2e4143..d5ee74e334d6a 100644 --- a/node_modules/minimatch/package.json +++ b/node_modules/minimatch/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. 
Schlueter (http://blog.izs.me)", "name": "minimatch", "description": "a glob matcher in javascript", - "version": "9.0.0", + "version": "9.0.1", "repository": { "type": "git", "url": "git://github.com/isaacs/minimatch.git" diff --git a/package-lock.json b/package-lock.json index d9c97b0f7d1ef..a7a75b1612c10 100644 --- a/package-lock.json +++ b/package-lock.json @@ -100,7 +100,7 @@ "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.2", - "glob": "^10.2.4", + "glob": "^10.2.7", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", "ini": "^4.1.0", @@ -5909,15 +5909,15 @@ "dev": true }, "node_modules/glob": { - "version": "10.2.4", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.2.4.tgz", - "integrity": "sha512-fDboBse/sl1oXSLhIp0FcCJgzW9KmhC/q8ULTKC82zc+DL3TL7FNb8qlt5qqXN53MsKEUSIcb+7DLmEygOE5Yw==", + "version": "10.2.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.2.7.tgz", + "integrity": "sha512-jTKehsravOJo8IJxUGfZILnkvVJM/MOfHRs8QcXolVef2zNI9Tqyy5+SeuOAZd3upViEZQLyFpQhYiHLrMUNmA==", "inBundle": true, "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", - "minimatch": "^9.0.0", - "minipass": "^5.0.0 || ^6.0.0", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2", "path-scurry": "^1.7.0" }, "bin": { @@ -8909,9 +8909,9 @@ } }, "node_modules/minimatch": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.0.tgz", - "integrity": "sha512-0jJj8AvgKqWN05mrwuqi8QYKx1WmYSUoKSxu5Qhs9prezTz10sxAHGNZe9J9cqIJzta8DWsleh2KaVaLl6Ru2w==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.1.tgz", + "integrity": "sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w==", "inBundle": true, "dependencies": { "brace-expansion": "^2.0.1" diff --git a/package.json b/package.json index b256a78c6c4a9..02a0c9ac151a8 100644 --- a/package.json +++ b/package.json @@ -67,7 +67,7 @@ "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.2", - "glob": "^10.2.4", + "glob": "^10.2.7", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", "ini": "^4.1.0", From 99bf9c7b3a122a33e89a5a246944b3e66665f709 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:40:06 -0700 Subject: [PATCH 03/16] deps: ini@4.1.1 --- node_modules/ini/lib/ini.js | 7 ++++--- node_modules/ini/package.json | 6 +++--- package-lock.json | 8 ++++---- package.json | 2 +- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/node_modules/ini/lib/ini.js b/node_modules/ini/lib/ini.js index 763c829c6fc51..724d69d85a0e4 100644 --- a/node_modules/ini/lib/ini.js +++ b/node_modules/ini/lib/ini.js @@ -8,8 +8,9 @@ const encode = (obj, opt = {}) => { opt.newline = opt.newline === true opt.sort = opt.sort === true opt.whitespace = opt.whitespace === true || opt.align === true + // The `typeof` check is required because accessing the `process` directly fails on browsers. 
/* istanbul ignore next */ - opt.platform = opt.platform || process?.platform + opt.platform = opt.platform || (typeof process !== 'undefined' && process.platform) opt.bracketedArray = opt.bracketedArray !== false /* istanbul ignore next */ @@ -172,8 +173,8 @@ const decode = (str, opt = {}) => { const remove = [] for (const k of Object.keys(out)) { if (!hasOwnProperty.call(out, k) || - typeof out[k] !== 'object' || - Array.isArray(out[k])) { + typeof out[k] !== 'object' || + Array.isArray(out[k])) { continue } diff --git a/node_modules/ini/package.json b/node_modules/ini/package.json index 5dd968e0edb88..c1a50e93c07f9 100644 --- a/node_modules/ini/package.json +++ b/node_modules/ini/package.json @@ -2,7 +2,7 @@ "author": "GitHub Inc.", "name": "ini", "description": "An ini encoder/decoder for node", - "version": "4.1.0", + "version": "4.1.1", "repository": { "type": "git", "url": "https://github.com/npm/ini.git" @@ -20,7 +20,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.13.0", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.1" }, "license": "ISC", @@ -33,7 +33,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.13.0", + "version": "4.15.1", "publish": "true" }, "tap": { diff --git a/package-lock.json b/package-lock.json index a7a75b1612c10..be6547962b5fa 100644 --- a/package-lock.json +++ b/package-lock.json @@ -103,7 +103,7 @@ "glob": "^10.2.7", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", - "ini": "^4.1.0", + "ini": "^4.1.1", "init-package-json": "^5.0.0", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^3.0.0", @@ -6569,9 +6569,9 @@ "inBundle": true }, "node_modules/ini": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.0.tgz", - "integrity": "sha512-HLR38RSF2iulAzc3I/sma4CoYxQP844rPYCNfzGDOHqa/YqVlwuuZgBx6M50/X8dKgzk0cm1qRg3+47mK2N+cQ==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz", + "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==", "inBundle": true, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/package.json b/package.json index 02a0c9ac151a8..c676a5fd2bba6 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,7 @@ "glob": "^10.2.7", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", - "ini": "^4.1.0", + "ini": "^4.1.1", "init-package-json": "^5.0.0", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^3.0.0", From 98a65d468623b42e01e4c738544a0dd3d32da16b Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:40:57 -0700 Subject: [PATCH 04/16] deps: node-gyp@9.4.0 --- DEPENDENCIES.md | 10 +- node_modules/.gitignore | 22 +- node_modules/@gar/promisify/LICENSE.md | 10 - node_modules/@gar/promisify/index.js | 36 - node_modules/@gar/promisify/package.json | 32 - node_modules/@npmcli/move-file/LICENSE.md | 22 - node_modules/@npmcli/move-file/lib/index.js | 185 --- node_modules/@npmcli/move-file/package.json | 47 - node_modules/exponential-backoff/LICENSE | 202 ++++ .../exponential-backoff/dist/backoff.js | 118 ++ .../dist/delay/always/always.delay.js | 25 + .../dist/delay/delay.base.js | 45 + .../dist/delay/delay.factory.js | 17 + .../dist/delay/delay.interface.js | 3 + .../dist/delay/skip-first/skip-first.delay.js | 82 ++ .../dist/jitter/full/full.jitter.js | 8 + .../dist/jitter/jitter.factory.js | 15 + .../dist/jitter/no/no.jitter.js | 7 + 
.../exponential-backoff/dist/options.js | 31 + node_modules/exponential-backoff/package.json | 61 + node_modules/infer-owner/LICENSE | 15 - node_modules/infer-owner/index.js | 71 -- node_modules/infer-owner/package.json | 26 - .../gyp/pylib/gyp/generator/eclipse.py | 2 +- .../node-gyp/gyp/pylib/gyp/xcodeproj_file.py | 2 +- .../node-gyp/lib/find-visualstudio.js | 15 +- node_modules/node-gyp/lib/install.js | 266 +++-- .../node_modules/@npmcli/fs/LICENSE.md | 20 - .../@npmcli/fs/lib/common/get-options.js | 20 - .../@npmcli/fs/lib/common/node.js | 9 - .../@npmcli/fs/lib/common/owner-sync.js | 96 -- .../@npmcli/fs/lib/common/owner.js | 96 -- .../node_modules/@npmcli/fs/lib/copy-file.js | 16 - .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 - .../node_modules/@npmcli/fs/lib/cp/index.js | 22 - .../@npmcli/fs/lib/cp/polyfill.js | 428 ------- .../node_modules/@npmcli/fs/lib/errors.js | 129 -- .../node_modules/@npmcli/fs/lib/fs.js | 14 - .../node_modules/@npmcli/fs/lib/index.js | 12 - .../node_modules/@npmcli/fs/lib/mkdir.js | 19 - .../node_modules/@npmcli/fs/lib/mkdtemp.js | 23 - .../node_modules/@npmcli/fs/lib/rm/index.js | 22 - .../@npmcli/fs/lib/rm/polyfill.js | 239 ---- .../@npmcli/fs/lib/with-owner-sync.js | 21 - .../node_modules/@npmcli/fs/lib/with-owner.js | 21 - .../@npmcli/fs/lib/with-temp-dir.js | 41 - .../node_modules/@npmcli/fs/lib/write-file.js | 14 - .../node_modules/@npmcli/fs/package.json | 50 - .../node-gyp/node_modules/cacache/LICENSE.md | 16 - .../node_modules/cacache/lib/content/path.js | 29 - .../node_modules/cacache/lib/content/read.js | 241 ---- .../node_modules/cacache/lib/content/rm.js | 20 - .../node_modules/cacache/lib/content/write.js | 189 --- .../node_modules/cacache/lib/entry-index.js | 404 ------- .../node-gyp/node_modules/cacache/lib/get.js | 225 ---- .../node_modules/cacache/lib/index.js | 45 - .../node_modules/cacache/lib/memoization.js | 72 -- .../node-gyp/node_modules/cacache/lib/put.js | 80 -- .../node-gyp/node_modules/cacache/lib/rm.js | 31 - .../cacache/lib/util/fix-owner.js | 145 --- .../cacache/lib/util/hash-to-segments.js | 7 - .../cacache/lib/util/move-file.js | 56 - .../node_modules/cacache/lib/util/tmp.js | 33 - .../node_modules/cacache/lib/verify.js | 257 ---- .../node_modules/brace-expansion/LICENSE | 21 - .../node_modules/brace-expansion/index.js | 203 ---- .../node_modules/brace-expansion/package.json | 46 - .../cacache/node_modules/glob/LICENSE | 15 - .../cacache/node_modules/glob/common.js | 244 ---- .../cacache/node_modules/glob/glob.js | 790 ------------ .../cacache/node_modules/glob/package.json | 55 - .../cacache/node_modules/glob/sync.js | 486 -------- .../cacache/node_modules/minimatch/LICENSE | 15 - .../node_modules/minimatch/lib/path.js | 4 - .../node_modules/minimatch/minimatch.js | 944 --------------- .../node_modules/minimatch/package.json | 35 - .../node_modules/cacache/package.json | 84 -- .../node-gyp/node_modules/fs-minipass/LICENSE | 15 - .../node_modules/fs-minipass/index.js | 422 ------- .../node_modules/fs-minipass/package.json | 39 - .../node_modules/make-fetch-happen/LICENSE | 16 - .../make-fetch-happen/lib/agent.js | 214 ---- .../make-fetch-happen/lib/cache/entry.js | 444 ------- .../make-fetch-happen/lib/cache/errors.js | 11 - .../make-fetch-happen/lib/cache/index.js | 49 - .../make-fetch-happen/lib/cache/key.js | 17 - .../make-fetch-happen/lib/cache/policy.js | 161 --- .../node_modules/make-fetch-happen/lib/dns.js | 49 - .../make-fetch-happen/lib/fetch.js | 118 -- .../make-fetch-happen/lib/index.js | 41 - 
.../make-fetch-happen/lib/options.js | 52 - .../make-fetch-happen/lib/pipeline.js | 41 - .../make-fetch-happen/lib/remote.js | 121 -- .../make-fetch-happen/package.json | 79 -- .../node_modules/minipass-fetch/LICENSE | 28 - .../minipass-fetch/lib/abort-error.js | 17 - .../node_modules/minipass-fetch/lib/blob.js | 97 -- .../node_modules/minipass-fetch/lib/body.js | 350 ------ .../minipass-fetch/lib/fetch-error.js | 32 - .../minipass-fetch/lib/headers.js | 267 ----- .../node_modules/minipass-fetch/lib/index.js | 365 ------ .../minipass-fetch/lib/request.js | 281 ----- .../minipass-fetch/lib/response.js | 90 -- .../node_modules/minipass-fetch/package.json | 67 -- .../node-gyp/node_modules/minipass/LICENSE | 15 - .../node-gyp/node_modules/minipass/index.js | 649 ---------- .../node_modules/minipass/package.json | 56 - .../node-gyp/node_modules/ssri/LICENSE.md | 16 - .../node-gyp/node_modules/ssri/lib/index.js | 524 -------- .../node-gyp/node_modules/ssri/package.json | 63 - .../node_modules/unique-filename/LICENSE | 5 - .../node_modules/unique-filename/lib/index.js | 7 - .../node_modules/unique-filename/package.json | 48 - .../node-gyp/node_modules/unique-slug/LICENSE | 15 - .../node_modules/unique-slug/lib/index.js | 11 - .../node_modules/unique-slug/package.json | 44 - node_modules/node-gyp/package.json | 13 +- .../fixtures/VS_2022_Community_workload.txt | 569 +++++++++ node_modules/node-gyp/test/reporter.js | 75 ++ node_modules/node-gyp/test/test-addon.js | 200 ++-- .../node-gyp/test/test-configure-python.js | 111 +- .../node-gyp/test/test-create-config-gypi.js | 91 +- node_modules/node-gyp/test/test-download.js | 365 +++--- .../test/test-find-accessible-sync.js | 99 +- .../node-gyp/test/test-find-node-directory.js | 196 ++- .../node-gyp/test/test-find-python.js | 353 +++--- .../node-gyp/test/test-find-visualstudio.js | 1056 ++++++++--------- node_modules/node-gyp/test/test-install.js | 151 ++- node_modules/node-gyp/test/test-options.js | 59 +- .../node-gyp/test/test-process-release.js | 751 ++++++------ node_modules/node-gyp/update-gyp.py | 2 +- package-lock.json | 229 +--- package.json | 2 +- 133 files changed, 3189 insertions(+), 13563 deletions(-) delete mode 100644 node_modules/@gar/promisify/LICENSE.md delete mode 100644 node_modules/@gar/promisify/index.js delete mode 100644 node_modules/@gar/promisify/package.json delete mode 100644 node_modules/@npmcli/move-file/LICENSE.md delete mode 100644 node_modules/@npmcli/move-file/lib/index.js delete mode 100644 node_modules/@npmcli/move-file/package.json create mode 100644 node_modules/exponential-backoff/LICENSE create mode 100644 node_modules/exponential-backoff/dist/backoff.js create mode 100644 node_modules/exponential-backoff/dist/delay/always/always.delay.js create mode 100644 node_modules/exponential-backoff/dist/delay/delay.base.js create mode 100644 node_modules/exponential-backoff/dist/delay/delay.factory.js create mode 100644 node_modules/exponential-backoff/dist/delay/delay.interface.js create mode 100644 node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js create mode 100644 node_modules/exponential-backoff/dist/jitter/full/full.jitter.js create mode 100644 node_modules/exponential-backoff/dist/jitter/jitter.factory.js create mode 100644 node_modules/exponential-backoff/dist/jitter/no/no.jitter.js create mode 100644 node_modules/exponential-backoff/dist/options.js create mode 100644 node_modules/exponential-backoff/package.json delete mode 100644 node_modules/infer-owner/LICENSE delete mode 100644 
node_modules/infer-owner/index.js delete mode 100644 node_modules/infer-owner/package.json delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner-sync.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner-sync.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js delete mode 100644 node_modules/node-gyp/node_modules/@npmcli/fs/package.json delete mode 100644 node_modules/node-gyp/node_modules/cacache/LICENSE.md delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/path.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/read.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/rm.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/write.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/entry-index.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/get.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/index.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/memoization.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/put.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/rm.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/lib/verify.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE delete mode 100644 
node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js delete mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json delete mode 100644 node_modules/node-gyp/node_modules/cacache/package.json delete mode 100644 node_modules/node-gyp/node_modules/fs-minipass/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/fs-minipass/index.js delete mode 100644 node_modules/node-gyp/node_modules/fs-minipass/package.json delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js delete mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/package.json delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js delete mode 100644 node_modules/node-gyp/node_modules/minipass-fetch/package.json delete mode 100644 node_modules/node-gyp/node_modules/minipass/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/minipass/index.js delete mode 100644 node_modules/node-gyp/node_modules/minipass/package.json delete mode 100644 node_modules/node-gyp/node_modules/ssri/LICENSE.md delete mode 100644 node_modules/node-gyp/node_modules/ssri/lib/index.js delete mode 100644 node_modules/node-gyp/node_modules/ssri/package.json delete mode 100644 
node_modules/node-gyp/node_modules/unique-filename/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/unique-filename/lib/index.js delete mode 100644 node_modules/node-gyp/node_modules/unique-filename/package.json delete mode 100644 node_modules/node-gyp/node_modules/unique-slug/LICENSE delete mode 100644 node_modules/node-gyp/node_modules/unique-slug/lib/index.js delete mode 100644 node_modules/node-gyp/node_modules/unique-slug/package.json create mode 100644 node_modules/node-gyp/test/fixtures/VS_2022_Community_workload.txt create mode 100644 node_modules/node-gyp/test/reporter.js diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index fc1a6bb303057..d14469bdc56ff 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -274,21 +274,15 @@ graph LR; bin-links-->write-file-atomic; brace-expansion-->balanced-match; builtins-->semver; - cacache-->chownr; cacache-->fs-minipass; cacache-->glob; - cacache-->infer-owner; cacache-->lru-cache; cacache-->minipass-collect; cacache-->minipass-flush; cacache-->minipass-pipeline; cacache-->minipass; - cacache-->mkdirp; cacache-->npmcli-fs["@npmcli/fs"]; - cacache-->npmcli-move-file["@npmcli/move-file"]; cacache-->p-map; - cacache-->promise-inflight; - cacache-->rimraf; cacache-->ssri; cacache-->tar; cacache-->unique-filename; @@ -464,7 +458,6 @@ graph LR; make-fetch-happen-->https-proxy-agent; make-fetch-happen-->is-lambda; make-fetch-happen-->lru-cache; - make-fetch-happen-->minipass-collect; make-fetch-happen-->minipass-fetch; make-fetch-happen-->minipass-flush; make-fetch-happen-->minipass-pipeline; @@ -488,6 +481,7 @@ graph LR; minizlib-->minipass; minizlib-->yallist; node-gyp-->env-paths; + node-gyp-->exponential-backoff; node-gyp-->glob; node-gyp-->graceful-fs; node-gyp-->make-fetch-happen; @@ -707,8 +701,6 @@ graph LR; npmcli-mock-registry-->npmcli-template-oss["@npmcli/template-oss"]; npmcli-mock-registry-->pacote; npmcli-mock-registry-->tap; - npmcli-move-file-->mkdirp; - npmcli-move-file-->rimraf; npmcli-package-json-->glob; npmcli-package-json-->json-parse-even-better-errors; npmcli-package-json-->normalize-package-data; diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 1db4a271b386c..a96ea88cb87dd 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -6,9 +6,6 @@ !/@colors/ /@colors/* !/@colors/colors -!/@gar/ -/@gar/* -!/@gar/promisify !/@isaacs/ /@isaacs/* !/@isaacs/cliui @@ -29,7 +26,6 @@ !/@npmcli/installed-package-contents !/@npmcli/map-workspaces !/@npmcli/metavuln-calculator -!/@npmcli/move-file !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json @@ -103,6 +99,7 @@ !/err-code !/event-target-shim !/events +!/exponential-backoff !/fastest-levenshtein !/foreground-child !/fs-minipass @@ -123,7 +120,6 @@ !/ignore-walk !/imurmurhash !/indent-string -!/infer-owner !/inflight !/inherits !/ini @@ -188,32 +184,16 @@ !/node-gyp !/node-gyp/node_modules/ /node-gyp/node_modules/* -!/node-gyp/node_modules/@npmcli/ -/node-gyp/node_modules/@npmcli/* -!/node-gyp/node_modules/@npmcli/fs !/node-gyp/node_modules/abbrev !/node-gyp/node_modules/are-we-there-yet !/node-gyp/node_modules/brace-expansion -!/node-gyp/node_modules/cacache -!/node-gyp/node_modules/cacache/node_modules/ -/node-gyp/node_modules/cacache/node_modules/* -!/node-gyp/node_modules/cacache/node_modules/brace-expansion -!/node-gyp/node_modules/cacache/node_modules/glob -!/node-gyp/node_modules/cacache/node_modules/minimatch -!/node-gyp/node_modules/fs-minipass !/node-gyp/node_modules/gauge !/node-gyp/node_modules/glob 
-!/node-gyp/node_modules/make-fetch-happen !/node-gyp/node_modules/minimatch -!/node-gyp/node_modules/minipass-fetch -!/node-gyp/node_modules/minipass !/node-gyp/node_modules/nopt !/node-gyp/node_modules/npmlog !/node-gyp/node_modules/readable-stream !/node-gyp/node_modules/signal-exit -!/node-gyp/node_modules/ssri -!/node-gyp/node_modules/unique-filename -!/node-gyp/node_modules/unique-slug !/node-gyp/node_modules/which !/nopt !/normalize-package-data diff --git a/node_modules/@gar/promisify/LICENSE.md b/node_modules/@gar/promisify/LICENSE.md deleted file mode 100644 index 64f773240bed1..0000000000000 --- a/node_modules/@gar/promisify/LICENSE.md +++ /dev/null @@ -1,10 +0,0 @@ -The MIT License (MIT) - -Copyright © 2020-2022 Michael Garvin - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- diff --git a/node_modules/@gar/promisify/index.js b/node_modules/@gar/promisify/index.js deleted file mode 100644 index d0be95f6fec61..0000000000000 --- a/node_modules/@gar/promisify/index.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict' - -const { promisify } = require('util') - -const handler = { - get: function (target, prop, receiver) { - if (typeof target[prop] !== 'function') { - return target[prop] - } - if (target[prop][promisify.custom]) { - return function () { - return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments) - } - } - return function () { - return new Promise((resolve, reject) => { - Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) { - if (err) { - return reject(err) - } - resolve(result) - }]) - }) - } - } -} - -module.exports = function (thingToPromisify) { - if (typeof thingToPromisify === 'function') { - return promisify(thingToPromisify) - } - if (typeof thingToPromisify === 'object') { - return new Proxy(thingToPromisify, handler) - } - throw new TypeError('Can only promisify functions or objects') -} diff --git a/node_modules/@gar/promisify/package.json b/node_modules/@gar/promisify/package.json deleted file mode 100644 index d0ce69b2f7c0e..0000000000000 --- a/node_modules/@gar/promisify/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "@gar/promisify", - "version": "1.1.3", - "description": "Promisify an entire class or object", - "main": "index.js", - "repository": { - "type": "git", - "url": "https://github.com/wraithgar/gar-promisify.git" - }, - "scripts": { - "lint": "standard", - "lint:fix": "standard --fix", - "test": "lab -a @hapi/code -t 100", - "posttest": "npm run lint" - }, - "files": [ - "index.js" - ], - "keywords": [ - "promisify", - "all", - "class", - "object" - ], - "author": "Gar ", - "license": "MIT", - "devDependencies": { - "@hapi/code": "^8.0.1", - "@hapi/lab": "^24.1.0", - "standard": "^16.0.3" - } -} diff --git a/node_modules/@npmcli/move-file/LICENSE.md b/node_modules/@npmcli/move-file/LICENSE.md deleted file mode 100644 index 072bf20840acd..0000000000000 --- a/node_modules/@npmcli/move-file/LICENSE.md +++ /dev/null @@ -1,22 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) -Copyright (c) npm, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/@npmcli/move-file/lib/index.js b/node_modules/@npmcli/move-file/lib/index.js deleted file mode 100644 index 5789bb127e096..0000000000000 --- a/node_modules/@npmcli/move-file/lib/index.js +++ /dev/null @@ -1,185 +0,0 @@ -const { dirname, join, resolve, relative, isAbsolute } = require('path') -const rimraf_ = require('rimraf') -const { promisify } = require('util') -const { - access: access_, - accessSync, - copyFile: copyFile_, - copyFileSync, - readdir: readdir_, - readdirSync, - rename: rename_, - renameSync, - stat: stat_, - statSync, - lstat: lstat_, - lstatSync, - symlink: symlink_, - symlinkSync, - readlink: readlink_, - readlinkSync, -} = require('fs') - -const access = promisify(access_) -const copyFile = promisify(copyFile_) -const readdir = promisify(readdir_) -const rename = promisify(rename_) -const stat = promisify(stat_) -const lstat = promisify(lstat_) -const symlink = promisify(symlink_) -const readlink = promisify(readlink_) -const rimraf = promisify(rimraf_) -const rimrafSync = rimraf_.sync - -const mkdirp = require('mkdirp') - -const pathExists = async path => { - try { - await access(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const pathExistsSync = path => { - try { - accessSync(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && await pathExists(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - await mkdirp(dirname(destination)) - - try { - await rename(source, destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = await lstat(source) - if (sourceStat.isDirectory()) { - const files = await readdir(source) - await Promise.all(files.map((file) => - moveFile(join(source, file), join(destination, file), options, false, symlinks) - )) - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - await copyFile(source, destination) - } - } else { - throw error - } - } - - if (root) { - await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { - let target = await readlink(symSource) - // junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = await stat(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - await symlink( - target, - symDestination, - targetStat - ) - })) - await rimraf(source) - } -} - -const moveFileSync = (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && pathExistsSync(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - mkdirp.sync(dirname(destination)) - - try { - renameSync(source, 
destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = lstatSync(source) - if (sourceStat.isDirectory()) { - const files = readdirSync(source) - for (const file of files) { - moveFileSync(join(source, file), join(destination, file), options, false, symlinks) - } - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - copyFileSync(source, destination) - } - } else { - throw error - } - } - - if (root) { - for (const { source: symSource, destination: symDestination } of symlinks) { - let target = readlinkSync(symSource) - // junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = statSync(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - symlinkSync( - target, - symDestination, - targetStat - ) - } - rimrafSync(source) - } -} - -module.exports = moveFile -module.exports.sync = moveFileSync diff --git a/node_modules/@npmcli/move-file/package.json b/node_modules/@npmcli/move-file/package.json deleted file mode 100644 index 58793b93a9ca0..0000000000000 --- a/node_modules/@npmcli/move-file/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "@npmcli/move-file", - "version": "2.0.1", - "files": [ - "bin/", - "lib/" - ], - "main": "lib/index.js", - "description": "move a file (fork of move-file)", - "dependencies": { - "mkdirp": "^1.0.4", - "rimraf": "^3.0.2" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.1" - }, - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/move-file.git" - }, - "tap": { - "check-coverage": true - }, - "license": "MIT", - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/exponential-backoff/LICENSE b/node_modules/exponential-backoff/LICENSE new file mode 100644 index 0000000000000..7a4a3ea2424c0 --- /dev/null +++ b/node_modules/exponential-backoff/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/backoff.js b/node_modules/exponential-backoff/dist/backoff.js new file mode 100644 index 0000000000000..a0aa0dc34b6b1 --- /dev/null +++ b/node_modules/exponential-backoff/dist/backoff.js @@ -0,0 +1,118 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var options_1 = require("./options"); +var delay_factory_1 = require("./delay/delay.factory"); +function backOff(request, options) { + if (options === void 0) { options = {}; } + return __awaiter(this, void 0, void 0, function () { + var sanitizedOptions, backOff; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + sanitizedOptions = options_1.getSanitizedOptions(options); + backOff = new BackOff(request, sanitizedOptions); + return [4 /*yield*/, backOff.execute()]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); +} +exports.backOff = backOff; +var BackOff = /** @class */ (function () { + function BackOff(request, options) { + this.request = request; + this.options = options; + this.attemptNumber = 0; + } + BackOff.prototype.execute = function () { + return __awaiter(this, void 0, void 0, function () { + var e_1, shouldRetry; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!!this.attemptLimitReached) return [3 /*break*/, 7]; + _a.label = 1; + case 1: + _a.trys.push([1, 4, , 6]); + return [4 /*yield*/, this.applyDelay()]; + case 2: + _a.sent(); + return [4 /*yield*/, this.request()]; + case 3: return [2 /*return*/, _a.sent()]; + case 4: + e_1 = _a.sent(); + this.attemptNumber++; + return [4 /*yield*/, this.options.retry(e_1, this.attemptNumber)]; + case 5: + shouldRetry = _a.sent(); + if (!shouldRetry || this.attemptLimitReached) { + throw e_1; + } + return [3 /*break*/, 6]; + case 6: return [3 /*break*/, 0]; + case 7: throw new Error("Something went wrong."); + } + }); + }); + }; + Object.defineProperty(BackOff.prototype, "attemptLimitReached", { + get: function () { + return this.attemptNumber >= this.options.numOfAttempts; + }, + enumerable: true, + configurable: true + }); + BackOff.prototype.applyDelay = function () { + return __awaiter(this, void 0, void 0, function () { + var delay; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + delay = delay_factory_1.DelayFactory(this.options, this.attemptNumber); + return [4 /*yield*/, delay.apply()]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); + }; + return BackOff; +}()); +//# sourceMappingURL=backoff.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/delay/always/always.delay.js b/node_modules/exponential-backoff/dist/delay/always/always.delay.js new file mode 100644 index 0000000000000..40e34071e493d --- /dev/null +++ b/node_modules/exponential-backoff/dist/delay/always/always.delay.js @@ -0,0 +1,25 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +var delay_base_1 = require("../delay.base"); +var AlwaysDelay = /** @class */ (function (_super) { + __extends(AlwaysDelay, _super); + function AlwaysDelay() { + return _super !== null && _super.apply(this, arguments) || this; + } + return AlwaysDelay; +}(delay_base_1.Delay)); +exports.AlwaysDelay = AlwaysDelay; +//# sourceMappingURL=always.delay.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/delay/delay.base.js b/node_modules/exponential-backoff/dist/delay/delay.base.js new file mode 100644 index 0000000000000..b146c2fa62041 --- /dev/null +++ b/node_modules/exponential-backoff/dist/delay/delay.base.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var jitter_factory_1 = require("../jitter/jitter.factory"); +var Delay = /** @class */ (function () { + function Delay(options) { + this.options = options; + this.attempt = 0; + } + Delay.prototype.apply = function () { + var _this = this; + return new Promise(function (resolve) { return setTimeout(resolve, _this.jitteredDelay); }); + }; + Delay.prototype.setAttemptNumber = function (attempt) { + this.attempt = attempt; + }; + Object.defineProperty(Delay.prototype, "jitteredDelay", { + get: function () { + var jitter = jitter_factory_1.JitterFactory(this.options); + return jitter(this.delay); + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(Delay.prototype, "delay", { + get: function () { + var constant = this.options.startingDelay; + var base = this.options.timeMultiple; + var power = this.numOfDelayedAttempts; + var delay = constant * Math.pow(base, power); + return Math.min(delay, this.options.maxDelay); + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(Delay.prototype, "numOfDelayedAttempts", { + get: function () { + return this.attempt; + }, + enumerable: true, + configurable: true + }); + return Delay; +}()); +exports.Delay = Delay; +//# sourceMappingURL=delay.base.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/delay/delay.factory.js b/node_modules/exponential-backoff/dist/delay/delay.factory.js new file mode 100644 index 0000000000000..33008dbfc51c4 --- /dev/null +++ b/node_modules/exponential-backoff/dist/delay/delay.factory.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var skip_first_delay_1 = require("./skip-first/skip-first.delay"); +var always_delay_1 = require("./always/always.delay"); +function DelayFactory(options, attempt) { + var delay = initDelayClass(options); + delay.setAttemptNumber(attempt); + return delay; +} +exports.DelayFactory = DelayFactory; +function initDelayClass(options) { + if (!options.delayFirstAttempt) { + return new skip_first_delay_1.SkipFirstDelay(options); + } + return new always_delay_1.AlwaysDelay(options); +} +//# sourceMappingURL=delay.factory.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/delay/delay.interface.js b/node_modules/exponential-backoff/dist/delay/delay.interface.js new file mode 100644 index 0000000000000..6fe2a5a0e9d23 --- /dev/null +++ b/node_modules/exponential-backoff/dist/delay/delay.interface.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=delay.interface.js.map \ No newline at end of file diff --git 
a/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js b/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js new file mode 100644 index 0000000000000..73f8841dadd01 --- /dev/null +++ b/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js @@ -0,0 +1,82 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var delay_base_1 = require("../delay.base"); +var SkipFirstDelay = /** @class */ (function (_super) { + __extends(SkipFirstDelay, _super); + function SkipFirstDelay() { + return _super !== null && _super.apply(this, arguments) || this; + } + SkipFirstDelay.prototype.apply = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.isFirstAttempt ? 
true : _super.prototype.apply.call(this)]; + }); + }); + }; + Object.defineProperty(SkipFirstDelay.prototype, "isFirstAttempt", { + get: function () { + return this.attempt === 0; + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(SkipFirstDelay.prototype, "numOfDelayedAttempts", { + get: function () { + return this.attempt - 1; + }, + enumerable: true, + configurable: true + }); + return SkipFirstDelay; +}(delay_base_1.Delay)); +exports.SkipFirstDelay = SkipFirstDelay; +//# sourceMappingURL=skip-first.delay.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js b/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js new file mode 100644 index 0000000000000..16cee36bb5fa5 --- /dev/null +++ b/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function fullJitter(delay) { + var jitteredDelay = Math.random() * delay; + return Math.round(jitteredDelay); +} +exports.fullJitter = fullJitter; +//# sourceMappingURL=full.jitter.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/jitter/jitter.factory.js b/node_modules/exponential-backoff/dist/jitter/jitter.factory.js new file mode 100644 index 0000000000000..8aafe45f8fbb0 --- /dev/null +++ b/node_modules/exponential-backoff/dist/jitter/jitter.factory.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var full_jitter_1 = require("./full/full.jitter"); +var no_jitter_1 = require("./no/no.jitter"); +function JitterFactory(options) { + switch (options.jitter) { + case "full": + return full_jitter_1.fullJitter; + case "none": + default: + return no_jitter_1.noJitter; + } +} +exports.JitterFactory = JitterFactory; +//# sourceMappingURL=jitter.factory.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js b/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js new file mode 100644 index 0000000000000..15a40bb2a7bd6 --- /dev/null +++ b/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function noJitter(delay) { + return delay; +} +exports.noJitter = noJitter; +//# sourceMappingURL=no.jitter.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/dist/options.js b/node_modules/exponential-backoff/dist/options.js new file mode 100644 index 0000000000000..1d2ca1705dcfd --- /dev/null +++ b/node_modules/exponential-backoff/dist/options.js @@ -0,0 +1,31 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var defaultOptions = { + delayFirstAttempt: false, + jitter: "none", + maxDelay: Infinity, + numOfAttempts: 10, + retry: function () { return true; }, + startingDelay: 100, + timeMultiple: 2 +}; +function getSanitizedOptions(options) { + var sanitized = __assign(__assign({}, defaultOptions), options); + if (sanitized.numOfAttempts < 1) { + sanitized.numOfAttempts = 1; + } + return sanitized; +} +exports.getSanitizedOptions = 
getSanitizedOptions; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/exponential-backoff/package.json b/node_modules/exponential-backoff/package.json new file mode 100644 index 0000000000000..23232a0df2c57 --- /dev/null +++ b/node_modules/exponential-backoff/package.json @@ -0,0 +1,61 @@ +{ + "name": "exponential-backoff", + "version": "3.1.1", + "description": "A utility that allows retrying a function with an exponential delay between attempts.", + "files": [ + "dist/" + ], + "main": "dist/backoff.js", + "types": "dist/backoff.d.ts", + "scripts": { + "build": "tsc", + "test": "jest", + "test:watch": "jest --watch" + }, + "husky": { + "hooks": { + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.{ts,json,md}": [ + "prettier --write", + "git add" + ] + }, + "jest": { + "transform": { + "^.+\\.ts$": "ts-jest" + }, + "testRegex": "\\.spec\\.ts$", + "moduleFileExtensions": [ + "ts", + "js" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/coveo/exponential-backoff.git" + }, + "keywords": [ + "exponential", + "backoff", + "retry" + ], + "author": "Sami Sayegh", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/coveo/exponential-backoff/issues" + }, + "homepage": "https://github.com/coveo/exponential-backoff#readme", + "devDependencies": { + "@types/jest": "^24.0.18", + "@types/node": "^10.14.21", + "husky": "^3.0.9", + "jest": "^24.9.0", + "lint-staged": "^9.4.2", + "prettier": "^1.18.2", + "ts-jest": "^24.1.0", + "typescript": "^3.6.4" + } +} diff --git a/node_modules/infer-owner/LICENSE b/node_modules/infer-owner/LICENSE deleted file mode 100644 index 20a4762540923..0000000000000 --- a/node_modules/infer-owner/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) npm, Inc. and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/infer-owner/index.js b/node_modules/infer-owner/index.js deleted file mode 100644 index a7bddcbd2288b..0000000000000 --- a/node_modules/infer-owner/index.js +++ /dev/null @@ -1,71 +0,0 @@ -const cache = new Map() -const fs = require('fs') -const { dirname, resolve } = require('path') - - -const lstat = path => new Promise((res, rej) => - fs.lstat(path, (er, st) => er ? rej(er) : res(st))) - -const inferOwner = path => { - path = resolve(path) - if (cache.has(path)) - return Promise.resolve(cache.get(path)) - - const statThen = st => { - const { uid, gid } = st - cache.set(path, { uid, gid }) - return { uid, gid } - } - const parent = dirname(path) - const parentTrap = parent === path ? 
null : er => { - return inferOwner(parent).then((owner) => { - cache.set(path, owner) - return owner - }) - } - return lstat(path).then(statThen, parentTrap) -} - -const inferOwnerSync = path => { - path = resolve(path) - if (cache.has(path)) - return cache.get(path) - - const parent = dirname(path) - - // avoid obscuring call site by re-throwing - // "catch" the error by returning from a finally, - // only if we're not at the root, and the parent call works. - let threw = true - try { - const st = fs.lstatSync(path) - threw = false - const { uid, gid } = st - cache.set(path, { uid, gid }) - return { uid, gid } - } finally { - if (threw && parent !== path) { - const owner = inferOwnerSync(parent) - cache.set(path, owner) - return owner // eslint-disable-line no-unsafe-finally - } - } -} - -const inflight = new Map() -module.exports = path => { - path = resolve(path) - if (inflight.has(path)) - return Promise.resolve(inflight.get(path)) - const p = inferOwner(path).then(owner => { - inflight.delete(path) - return owner - }) - inflight.set(path, p) - return p -} -module.exports.sync = inferOwnerSync -module.exports.clearCache = () => { - cache.clear() - inflight.clear() -} diff --git a/node_modules/infer-owner/package.json b/node_modules/infer-owner/package.json deleted file mode 100644 index c4b2b6e6df206..0000000000000 --- a/node_modules/infer-owner/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "infer-owner", - "version": "1.0.4", - "description": "Infer the owner of a path based on the owner of its nearest existing parent", - "author": "Isaac Z. Schlueter (https://izs.me)", - "license": "ISC", - "scripts": { - "test": "tap -J test/*.js --100", - "snap": "TAP_SNAPSHOT=1 tap -J test/*.js --100", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "devDependencies": { - "mutate-fs": "^2.1.1", - "tap": "^12.4.2" - }, - "main": "index.js", - "repository": "https://github.com/npm/infer-owner", - "publishConfig": { - "access": "public" - }, - "files": [ - "index.js" - ] -} diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py index 1ff0dc83ae200..a851b4db757ed 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py @@ -24,7 +24,7 @@ import gyp.common import gyp.msvs_emulation import shlex -import xml.etree.cElementTree as ET +import xml.etree.ElementTree as ET generator_wants_static_library_dependencies_adjusted = False diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py index 0e941eb4719ea..4e0ec5e8828f7 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py @@ -2770,7 +2770,7 @@ def __init__(self, properties=None, id=None, parent=None, path=None): self.path = path self._other_pbxprojects = {} # super - return XCContainerPortal.__init__(self, properties, id, parent) + XCContainerPortal.__init__(self, properties, id, parent) def Name(self): name = self.path diff --git a/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/node-gyp/lib/find-visualstudio.js index d3815112e65de..16f6e79559307 100644 --- a/node_modules/node-gyp/lib/find-visualstudio.js +++ b/node_modules/node-gyp/lib/find-visualstudio.js @@ -266,10 +266,15 @@ VisualStudioFinder.prototype = { return {} }, + msBuildPathExists: function msBuildPathExists (path) { 
+ return fs.existsSync(path) + }, + + // Helper - process MSBuild information getMSBuild: function getMSBuild (info, versionYear) { const pkg = 'Microsoft.VisualStudio.VC.MSBuild.Base' const msbuildPath = path.join(info.path, 'MSBuild', 'Current', 'Bin', 'MSBuild.exe') + const msbuildPathArm64 = path.join(info.path, 'MSBuild', 'Current', 'Bin', 'arm64', 'MSBuild.exe') if (info.packages.indexOf(pkg) !== -1) { this.log.silly('- found VC.MSBuild.Base') if (versionYear === 2017) { @@ -279,8 +284,14 @@ VisualStudioFinder.prototype = { return msbuildPath } } - // visual studio 2022 don't has msbuild pkg - if (fs.existsSync(msbuildPath)) { + /** + * Visual Studio 2022 doesn't have the MSBuild package. + * Support for compiling _on_ ARM64 was added in MSVC 14.32.31326, + * so let's leverage it if the user has an ARM64 device. + */ + if (process.arch === 'arm64' && this.msBuildPathExists(msbuildPathArm64)) { + return msbuildPathArm64 + } else if (this.msBuildPathExists(msbuildPath)) { return msbuildPath } return null diff --git a/node_modules/node-gyp/lib/install.js b/node_modules/node-gyp/lib/install.js index 99f6d8592a3fd..1eb9f14c6742a 100644 --- a/node_modules/node-gyp/lib/install.js +++ b/node_modules/node-gyp/lib/install.js @@ -2,6 +2,8 @@ const fs = require('graceful-fs') const os = require('os') +const { backOff } = require('exponential-backoff') +const rm = require('rimraf') const tar = require('tar') const path = require('path') const util = require('util') @@ -20,6 +22,10 @@ const streamPipeline = util.promisify(stream.pipeline) async function install (fs, gyp, argv) { const release = processRelease(argv, gyp, process.version, process.release) + // Detecting target_arch based on logic from create-config-gypi.js. Used on Windows only. + const arch = win ? (gyp.opts.target_arch || gyp.opts.arch || process.arch || 'ia32') : '' + // Used to prevent downloading the tarball if only a new node.lib is required on Windows.
+ let shouldDownloadTarball = true // Determine which node dev files version we are installing log.verbose('install', 'input version string %j', release.version) @@ -90,6 +96,26 @@ async function install (fs, gyp, argv) { } } log.verbose('install', 'version is good') + if (win) { + log.verbose('on Windows; need to check node.lib') + const nodeLibPath = path.resolve(devDir, arch, 'node.lib') + try { + await fs.promises.stat(nodeLibPath) + } catch (err) { + if (err.code === 'ENOENT') { + log.verbose('install', `version not already installed for ${arch}, continuing with install`, release.version) + try { + shouldDownloadTarball = false + return await go() + } catch (err) { + return rollback(err) + } + } else if (err.code === 'EACCES') { + return eaccesFallback(err) + } + throw err + } + } } else { try { return await go() @@ -98,15 +124,49 @@ async function install (fs, gyp, argv) { } } + async function copyDirectory (src, dest) { + try { + await fs.promises.stat(src) + } catch { + throw new Error(`Missing source directory for copy: ${src}`) + } + await fs.promises.mkdir(dest, { recursive: true }) + const entries = await fs.promises.readdir(src, { withFileTypes: true }) + for (const entry of entries) { + if (entry.isDirectory()) { + await copyDirectory(path.join(src, entry.name), path.join(dest, entry.name)) + } else if (entry.isFile()) { + // with parallel installs, copying files may cause file errors on + // Windows so use an exponential backoff to resolve collisions + await backOff(async () => { + try { + await fs.promises.copyFile(path.join(src, entry.name), path.join(dest, entry.name)) + } catch (err) { + // if ensure, check if file already exists and that's good enough + if (gyp.opts.ensure && err.code === 'EBUSY') { + try { + await fs.promises.stat(path.join(dest, entry.name)) + return + } catch {} + } + throw err + } + }) + } else { + throw new Error('Unexpected file directory entry type') + } + } + } + async function go () { - log.verbose('ensuring nodedir is created', devDir) + log.verbose('ensuring devDir is created', devDir) // first create the dir for the node dev files try { const created = await fs.promises.mkdir(devDir, { recursive: true }) if (created) { - log.verbose('created nodedir', created) + log.verbose('created devDir', created) } } catch (err) { if (err.code === 'EACCES') { @@ -118,6 +178,7 @@ async function install (fs, gyp, argv) { // now download the node tarball const tarPath = gyp.opts.tarball + let extractErrors = false let extractCount = 0 const contentShasums = {} const expectShasums = {} @@ -136,71 +197,102 @@ async function install (fs, gyp, argv) { return isValid } - // download the tarball and extract! + function onwarn (code, message) { + extractErrors = true + log.error('error while extracting tarball', code, message) + } - if (tarPath) { - await tar.extract({ - file: tarPath, - strip: 1, - filter: isValid, - cwd: devDir - }) - } else { - try { - const res = await download(gyp, release.tarballUrl) + // download the tarball and extract! + // Omitted on Windows if only a new node.lib is required - if (res.status !== 200) { - throw new Error(`${res.status} response downloading ${release.tarballUrl}`) - } + // on Windows there can be file errors from tar if parallel installs + // are happening (not uncommon with multiple native modules) so + // extract the tarball to a temp directory first and then copy over + const tarExtractDir = win ?
await fs.promises.mkdtemp(path.join(os.tmpdir(), 'node-gyp-tmp-')) : devDir - await streamPipeline( - res.body, - // content checksum - new ShaSum((_, checksum) => { - const filename = path.basename(release.tarballUrl).trim() - contentShasums[filename] = checksum - log.verbose('content checksum', filename, checksum) - }), - tar.extract({ + try { + if (shouldDownloadTarball) { + if (tarPath) { + await tar.extract({ + file: tarPath, strip: 1, - cwd: devDir, - filter: isValid + filter: isValid, + onwarn, + cwd: tarExtractDir }) - ) - } catch (err) { - // something went wrong downloading the tarball? - if (err.code === 'ENOTFOUND') { - throw new Error('This is most likely not a problem with node-gyp or the package itself and\n' + - 'is related to network connectivity. In most cases you are behind a proxy or have bad \n' + - 'network settings.') + } else { + try { + const res = await download(gyp, release.tarballUrl) + + if (res.status !== 200) { + throw new Error(`${res.status} response downloading ${release.tarballUrl}`) + } + + await streamPipeline( + res.body, + // content checksum + new ShaSum((_, checksum) => { + const filename = path.basename(release.tarballUrl).trim() + contentShasums[filename] = checksum + log.verbose('content checksum', filename, checksum) + }), + tar.extract({ + strip: 1, + cwd: tarExtractDir, + filter: isValid, + onwarn + }) + ) + } catch (err) { + // something went wrong downloading the tarball? + if (err.code === 'ENOTFOUND') { + throw new Error('This is most likely not a problem with node-gyp or the package itself and\n' + + 'is related to network connectivity. In most cases you are behind a proxy or have bad \n' + + 'network settings.') + } + throw err + } } - throw err - } - } - // invoked after the tarball has finished being extracted - if (extractCount === 0) { - throw new Error('There was a fatal problem while downloading/extracting the tarball') - } + // invoked after the tarball has finished being extracted + if (extractErrors || extractCount === 0) { + throw new Error('There was a fatal problem while downloading/extracting the tarball') + } - log.verbose('tarball', 'done parsing tarball') + log.verbose('tarball', 'done parsing tarball') + } - const installVersionPath = path.resolve(devDir, 'installVersion') - await Promise.all([ + const installVersionPath = path.resolve(tarExtractDir, 'installVersion') + await Promise.all([ // need to download node.lib - ...(win ? downloadNodeLib() : []), - // write the "installVersion" file - fs.promises.writeFile(installVersionPath, gyp.package.installVersion + '\n'), - // Only download SHASUMS.txt if we downloaded something in need of SHA verification - ...(!tarPath || win ? [downloadShasums()] : []) - ]) - - log.verbose('download contents checksum', JSON.stringify(contentShasums)) - // check content shasums - for (const k in contentShasums) { - log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k]) - if (contentShasums[k] !== expectShasums[k]) { - throw new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k]) + ...(win ? [downloadNodeLib()] : []), + // write the "installVersion" file + fs.promises.writeFile(installVersionPath, gyp.package.installVersion + '\n'), + // Only download SHASUMS.txt if we downloaded something in need of SHA verification + ...(!tarPath || win ? 
[downloadShasums()] : []) + ]) + + log.verbose('download contents checksum', JSON.stringify(contentShasums)) + // check content shasums + for (const k in contentShasums) { + log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k]) + if (contentShasums[k] !== expectShasums[k]) { + throw new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k]) + } + } + + // copy over the files from the temp tarball extract directory to devDir + if (tarExtractDir !== devDir) { + await copyDirectory(tarExtractDir, devDir) + } + } finally { + if (tarExtractDir !== devDir) { + try { + // try to clean up the temp dir + await util.promisify(rm)(tarExtractDir) + } catch { + log.warn('failed to clean up temp tarball extract directory') + } } } @@ -228,43 +320,33 @@ async function install (fs, gyp, argv) { log.verbose('checksum data', JSON.stringify(expectShasums)) } - function downloadNodeLib () { + async function downloadNodeLib () { log.verbose('on Windows; need to download `' + release.name + '.lib`...') - const archs = ['ia32', 'x64', 'arm64'] - return archs.map(async (arch) => { - const dir = path.resolve(devDir, arch) - const targetLibPath = path.resolve(dir, release.name + '.lib') - const { libUrl, libPath } = release[arch] - const name = `${arch} ${release.name}.lib` - log.verbose(name, 'dir', dir) - log.verbose(name, 'url', libUrl) - - await fs.promises.mkdir(dir, { recursive: true }) - log.verbose('streaming', name, 'to:', targetLibPath) - - const res = await download(gyp, libUrl) - - if (res.status === 403 || res.status === 404) { - if (arch === 'arm64') { - // Arm64 is a newer platform on Windows and not all node distributions provide it. - log.verbose(`${name} was not found in ${libUrl}`) - } else { - log.warn(`${name} was not found in ${libUrl}`) - } - return - } else if (res.status !== 200) { - throw new Error(`${res.status} status code downloading ${name}`) - } + const dir = path.resolve(tarExtractDir, arch) + const targetLibPath = path.resolve(dir, release.name + '.lib') + const { libUrl, libPath } = release[arch] + const name = `${arch} ${release.name}.lib` + log.verbose(name, 'dir', dir) + log.verbose(name, 'url', libUrl) + + await fs.promises.mkdir(dir, { recursive: true }) + log.verbose('streaming', name, 'to:', targetLibPath) + + const res = await download(gyp, libUrl) + + // Since only the required node.lib is downloaded, throw an error if it is not fetched + if (res.status !== 200) { + throw new Error(`${res.status} status code downloading ${name}`) + } - return streamPipeline( - res.body, - new ShaSum((_, checksum) => { - contentShasums[libPath] = checksum - log.verbose('content checksum', libPath, checksum) - }), - fs.createWriteStream(targetLibPath) - ) - }) + return streamPipeline( + res.body, + new ShaSum((_, checksum) => { + contentShasums[libPath] = checksum + log.verbose('content checksum', libPath, checksum) + }), + fs.createWriteStream(targetLibPath) + ) } // downloadNodeLib() } // go() diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md b/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md deleted file mode 100644 index 5fc208ff122e0..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc.
- -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js deleted file mode 100644 index cb5982f79077a..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js +++ /dev/null @@ -1,20 +0,0 @@ -// given an input that may or may not be an object, return an object that has -// a copy of every defined property listed in 'copy'. if the input is not an -// object, assign it to the property named by 'wrap' -const getOptions = (input, { copy, wrap }) => { - const result = {} - - if (input && typeof input === 'object') { - for (const prop of copy) { - if (input[prop] !== undefined) { - result[prop] = input[prop] - } - } - } else { - result[wrap] = input - } - - return result -} - -module.exports = getOptions diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js deleted file mode 100644 index 4d13bc037359d..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js +++ /dev/null @@ -1,9 +0,0 @@ -const semver = require('semver') - -const satisfies = (range) => { - return semver.satisfies(process.version, range, { includePrerelease: true }) -} - -module.exports = { - satisfies, -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner-sync.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner-sync.js deleted file mode 100644 index 3704aa6d18e1e..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner-sync.js +++ /dev/null @@ -1,96 +0,0 @@ -const { dirname, resolve } = require('path') -const url = require('url') - -const fs = require('../fs.js') - -// given a path, find the owner of the nearest parent -const find = (path) => { - // if we have no getuid, permissions are irrelevant on this platform - if (!process.getuid) { - return {} - } - - // fs methods accept URL objects with a scheme of file: so we need to unwrap - // those into an actual path string before we can resolve it - const resolved = path != null && path.href && path.origin - ? 
resolve(url.fileURLToPath(path)) - : resolve(path) - - let stat - - try { - stat = fs.lstatSync(resolved) - } finally { - // if we got a stat, return its contents - if (stat) { - return { uid: stat.uid, gid: stat.gid } - } - - // try the parent directory - if (resolved !== dirname(resolved)) { - return find(dirname(resolved)) - } - - // no more parents, never got a stat, just return an empty object - return {} - } -} - -// given a path, uid, and gid update the ownership of the path if necessary -const update = (path, uid, gid) => { - // nothing to update, just exit - if (uid === undefined && gid === undefined) { - return - } - - try { - // see if the permissions are already the same, if they are we don't - // need to do anything, so return early - const stat = fs.statSync(path) - if (uid === stat.uid && gid === stat.gid) { - return - } - } catch { - // ignore errors - } - - try { - fs.chownSync(path, uid, gid) - } catch { - // ignore errors - } -} - -// accepts a `path` and the `owner` property of an options object and normalizes -// it into an object with numerical `uid` and `gid` -const validate = (path, input) => { - let uid - let gid - - if (typeof input === 'string' || typeof input === 'number') { - uid = input - gid = input - } else if (input && typeof input === 'object') { - uid = input.uid - gid = input.gid - } - - if (uid === 'inherit' || gid === 'inherit') { - const owner = find(path) - if (uid === 'inherit') { - uid = owner.uid - } - - if (gid === 'inherit') { - gid = owner.gid - } - } - - return { uid, gid } -} - -module.exports = { - find, - update, - validate, -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js deleted file mode 100644 index 9f02d41a5e4b3..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js +++ /dev/null @@ -1,96 +0,0 @@ -const { dirname, resolve } = require('path') -const url = require('url') - -const fs = require('../fs.js') - -// given a path, find the owner of the nearest parent -const find = async (path) => { - // if we have no getuid, permissions are irrelevant on this platform - if (!process.getuid) { - return {} - } - - // fs methods accept URL objects with a scheme of file: so we need to unwrap - // those into an actual path string before we can resolve it - const resolved = path != null && path.href && path.origin - ? 
resolve(url.fileURLToPath(path)) - : resolve(path) - - let stat - - try { - stat = await fs.lstat(resolved) - } finally { - // if we got a stat, return its contents - if (stat) { - return { uid: stat.uid, gid: stat.gid } - } - - // try the parent directory - if (resolved !== dirname(resolved)) { - return find(dirname(resolved)) - } - - // no more parents, never got a stat, just return an empty object - return {} - } -} - -// given a path, uid, and gid update the ownership of the path if necessary -const update = async (path, uid, gid) => { - // nothing to update, just exit - if (uid === undefined && gid === undefined) { - return - } - - try { - // see if the permissions are already the same, if they are we don't - // need to do anything, so return early - const stat = await fs.stat(path) - if (uid === stat.uid && gid === stat.gid) { - return - } - } catch { - // ignore errors - } - - try { - await fs.chown(path, uid, gid) - } catch { - // ignore errors - } -} - -// accepts a `path` and the `owner` property of an options object and normalizes -// it into an object with numerical `uid` and `gid` -const validate = async (path, input) => { - let uid - let gid - - if (typeof input === 'string' || typeof input === 'number') { - uid = input - gid = input - } else if (input && typeof input === 'object') { - uid = input.uid - gid = input.gid - } - - if (uid === 'inherit' || gid === 'inherit') { - const owner = await find(path) - if (uid === 'inherit') { - uid = owner.uid - } - - if (gid === 'inherit') { - gid = owner.gid - } - } - - return { uid, gid } -} - -module.exports = { - find, - update, - validate, -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js deleted file mode 100644 index 8888266d627f0..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js +++ /dev/null @@ -1,16 +0,0 @@ -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -const copyFile = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['mode'], - wrap: 'mode', - }) - - // the node core method as of 16.5.0 does not support the mode being in an - // object, so we have to pass the mode value directly - return withOwner(dest, () => fs.copyFile(src, dest, options.mode), opts) -} - -module.exports = copyFile diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE deleted file mode 100644 index 93546dfb7655b..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js deleted file mode 100644 index 5da4739bdd528..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('../fs.js') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 16.7.0 added fs.cp -const useNative = node.satisfies('>=16.7.0') - -const cp = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? fs.cp(src, dest, options) - : polyfill(src, dest, options) -} - -module.exports = cp diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js deleted file mode 100644 index f83ccbf57ecc9..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js +++ /dev/null @@ -1,428 +0,0 @@ -// this file is a modified version of the code in node 17.2.0 -// which is, in turn, a modified version of the fs-extra module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. 
-// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -'use strict' - -const { - ERR_FS_CP_DIR_TO_NON_DIR, - ERR_FS_CP_EEXIST, - ERR_FS_CP_EINVAL, - ERR_FS_CP_FIFO_PIPE, - ERR_FS_CP_NON_DIR_TO_DIR, - ERR_FS_CP_SOCKET, - ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, - ERR_FS_CP_UNKNOWN, - ERR_FS_EISDIR, - ERR_INVALID_ARG_TYPE, -} = require('../errors.js') -const { - constants: { - errno: { - EEXIST, - EISDIR, - EINVAL, - ENOTDIR, - }, - }, -} = require('os') -const { - chmod, - copyFile, - lstat, - mkdir, - readdir, - readlink, - stat, - symlink, - unlink, - utimes, -} = require('../fs.js') -const { - dirname, - isAbsolute, - join, - parse, - resolve, - sep, - toNamespacedPath, -} = require('path') -const { fileURLToPath } = require('url') - -const defaultOptions = { - dereference: false, - errorOnExist: false, - filter: undefined, - force: true, - preserveTimestamps: false, - recursive: false, -} - -async function cp (src, dest, opts) { - if (opts != null && typeof opts !== 'object') { - throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) - } - return cpFn( - toNamespacedPath(getValidatedPath(src)), - toNamespacedPath(getValidatedPath(dest)), - { ...defaultOptions, ...opts }) -} - -function getValidatedPath (fileURLOrPath) { - const path = fileURLOrPath != null && fileURLOrPath.href - && fileURLOrPath.origin - ? fileURLToPath(fileURLOrPath) - : fileURLOrPath - return path -} - -async function cpFn (src, dest, opts) { - // Warn about using preserveTimestamps on 32-bit node - // istanbul ignore next - if (opts.preserveTimestamps && process.arch === 'ia32') { - const warning = 'Using the preserveTimestamps option in 32-bit ' + - 'node is not recommended' - process.emitWarning(warning, 'TimestampPrecisionWarning') - } - const stats = await checkPaths(src, dest, opts) - const { srcStat, destStat } = stats - await checkParentPaths(src, srcStat, dest) - if (opts.filter) { - return handleFilter(checkParentDir, destStat, src, dest, opts) - } - return checkParentDir(destStat, src, dest, opts) -} - -async function checkPaths (src, dest, opts) { - const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: 'src and dest cannot be the same', - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new ERR_FS_CP_DIR_TO_NON_DIR({ - message: `cannot overwrite directory ${src} ` + - `with non-directory ${dest}`, - path: dest, - syscall: 'cp', - errno: EISDIR, - }) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new ERR_FS_CP_NON_DIR_TO_DIR({ - message: `cannot overwrite non-directory ${src} ` + - `with directory ${dest}`, - path: dest, - syscall: 'cp', - errno: ENOTDIR, - }) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return { srcStat, destStat } -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && - destStat.dev === srcStat.dev -} - -function getStats (src, dest, opts) { - const statFunc = opts.dereference ? 
- (file) => stat(file, { bigint: true }) : - (file) => lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch((err) => { - // istanbul ignore next: unsure how to cover. - if (err.code === 'ENOENT') { - return null - } - // istanbul ignore next: unsure how to cover. - throw err - }), - ]) -} - -async function checkParentDir (destStat, src, dest, opts) { - const destParent = dirname(dest) - const dirExists = await pathExists(destParent) - if (dirExists) { - return getStatsForCopy(destStat, src, dest, opts) - } - await mkdir(destParent, { recursive: true }) - return getStatsForCopy(destStat, src, dest, opts) -} - -function pathExists (dest) { - return stat(dest).then( - () => true, - // istanbul ignore next: not sure when this would occur - (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) -} - -// Recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. -async function checkParentPaths (src, srcStat, dest) { - const srcParent = resolve(dirname(src)) - const destParent = resolve(dirname(dest)) - if (destParent === srcParent || destParent === parse(destParent).root) { - return - } - let destStat - try { - destStat = await stat(destParent, { bigint: true }) - } catch (err) { - // istanbul ignore else: not sure when this would occur - if (err.code === 'ENOENT') { - return - } - // istanbul ignore next: not sure when this would occur - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return checkParentPaths(src, srcStat, destParent) -} - -const normalizePathToArray = (path) => - resolve(path).split(sep).filter(Boolean) - -// Return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = normalizePathToArray(src) - const destArr = normalizePathToArray(dest) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -async function handleFilter (onInclude, destStat, src, dest, opts, cb) { - const include = await opts.filter(src, dest) - if (include) { - return onInclude(destStat, src, dest, opts, cb) - } -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter) { - return handleFilter(getStatsForCopy, destStat, src, dest, opts) - } - return getStatsForCopy(destStat, src, dest, opts) -} - -async function getStatsForCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? 
stat : lstat - const srcStat = await statFn(src) - // istanbul ignore else: can't portably test FIFO - if (srcStat.isDirectory() && opts.recursive) { - return onDir(srcStat, destStat, src, dest, opts) - } else if (srcStat.isDirectory()) { - throw new ERR_FS_EISDIR({ - message: `${src} is a directory (not copied)`, - path: src, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) { - return onFile(srcStat, destStat, src, dest, opts) - } else if (srcStat.isSymbolicLink()) { - return onLink(destStat, src, dest) - } else if (srcStat.isSocket()) { - throw new ERR_FS_CP_SOCKET({ - message: `cannot copy a socket file: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFIFO()) { - throw new ERR_FS_CP_FIFO_PIPE({ - message: `cannot copy a FIFO pipe: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // istanbul ignore next: should be unreachable - throw new ERR_FS_CP_UNKNOWN({ - message: `cannot copy an unknown file type: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return _copyFile(srcStat, src, dest, opts) - } - return mayCopyFile(srcStat, src, dest, opts) -} - -async function mayCopyFile (srcStat, src, dest, opts) { - if (opts.force) { - await unlink(dest) - return _copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new ERR_FS_CP_EEXIST({ - message: `${dest} already exists`, - path: dest, - syscall: 'cp', - errno: EEXIST, - }) - } -} - -async function _copyFile (srcStat, src, dest, opts) { - await copyFile(src, dest) - if (opts.preserveTimestamps) { - return handleTimestampsAndMode(srcStat.mode, src, dest) - } - return setDestMode(dest, srcStat.mode) -} - -async function handleTimestampsAndMode (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - await makeFileWritable(dest, srcMode) - return setDestTimestampsAndMode(srcMode, src, dest) - } - return setDestTimestampsAndMode(srcMode, src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -async function setDestTimestampsAndMode (srcMode, src, dest) { - await setDestTimestamps(src, dest) - return setDestMode(dest, srcMode) -} - -function setDestMode (dest, srcMode) { - return chmod(dest, srcMode) -} - -async function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await stat(src) - return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return mkDirAndCopy(srcStat.mode, src, dest, opts) - } - return copyDir(src, dest, opts) -} - -async function mkDirAndCopy (srcMode, src, dest, opts) { - await mkdir(dest) - await copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -async function copyDir (src, dest, opts) { - const dir = await readdir(src) - for (let i = 0; i < dir.length; i++) { - const item = dir[i] - const srcItem = join(src, item) - const destItem = join(dest, item) - const { destStat } = await checkPaths(srcItem, destItem, opts) 
- await startCopy(destStat, srcItem, destItem, opts) - } -} - -async function onLink (destStat, src, dest) { - let resolvedSrc = await readlink(src) - if (!isAbsolute(resolvedSrc)) { - resolvedSrc = resolve(dirname(src), resolvedSrc) - } - if (!destStat) { - return symlink(resolvedSrc, dest) - } - let resolvedDest - try { - resolvedDest = await readlink(dest) - } catch (err) { - // Dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - // istanbul ignore next: can only test on windows - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { - return symlink(resolvedSrc, dest) - } - // istanbul ignore next: should not be possible - throw err - } - if (!isAbsolute(resolvedDest)) { - resolvedDest = resolve(dirname(dest), resolvedDest) - } - if (isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + - `${resolvedDest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // Do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - const srcStat = await stat(src) - if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ - message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return copyLink(resolvedSrc, dest) -} - -async function copyLink (resolvedSrc, dest) { - await unlink(dest) - return symlink(resolvedSrc, dest) -} - -module.exports = cp diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js deleted file mode 100644 index 1cd1e05d0c533..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js +++ /dev/null @@ -1,129 +0,0 @@ -'use strict' -const { inspect } = require('util') - -// adapted from node's internal/errors -// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js - -// close copy of node's internal SystemError class. -class SystemError { - constructor (code, prefix, context) { - // XXX context.code is undefined in all constructors used in cp/polyfill - // that may be a bug copied from node, maybe the constructor should use - // `code` not `errno`? 
nodejs/node#41104 - let message = `${prefix}: ${context.syscall} returned ` + - `${context.code} (${context.message})` - - if (context.path !== undefined) { - message += ` ${context.path}` - } - if (context.dest !== undefined) { - message += ` => ${context.dest}` - } - - this.code = code - Object.defineProperties(this, { - name: { - value: 'SystemError', - enumerable: false, - writable: true, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - info: { - value: context, - enumerable: true, - configurable: true, - writable: false, - }, - errno: { - get () { - return context.errno - }, - set (value) { - context.errno = value - }, - enumerable: true, - configurable: true, - }, - syscall: { - get () { - return context.syscall - }, - set (value) { - context.syscall = value - }, - enumerable: true, - configurable: true, - }, - }) - - if (context.path !== undefined) { - Object.defineProperty(this, 'path', { - get () { - return context.path - }, - set (value) { - context.path = value - }, - enumerable: true, - configurable: true, - }) - } - - if (context.dest !== undefined) { - Object.defineProperty(this, 'dest', { - get () { - return context.dest - }, - set (value) { - context.dest = value - }, - enumerable: true, - configurable: true, - }) - } - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } - - [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { - return inspect(this, { - ...ctx, - getters: true, - customInspect: false, - }) - } -} - -function E (code, message) { - module.exports[code] = class NodeError extends SystemError { - constructor (ctx) { - super(code, message, ctx) - } - } -} - -E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') -E('ERR_FS_CP_EEXIST', 'Target already exists') -E('ERR_FS_CP_EINVAL', 'Invalid src or dest') -E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') -E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') -E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') -E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') -E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') -E('ERR_FS_EISDIR', 'Path is a directory') - -module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { - constructor (name, expected, actual) { - super() - this.code = 'ERR_INVALID_ARG_TYPE' - this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}` - } -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js deleted file mode 100644 index 457da10eed03e..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js +++ /dev/null @@ -1,14 +0,0 @@ -const fs = require('fs') -const promisify = require('@gar/promisify') - -const isLower = (s) => s === s.toLowerCase() && s !== s.toUpperCase() - -const fsSync = Object.fromEntries(Object.entries(fs).filter(([k, v]) => - typeof v === 'function' && (k.endsWith('Sync') || !isLower(k[0])) -)) - -// this module returns the core fs async fns wrapped in a proxy that promisifies -// method calls within the getter. we keep it in a separate module so that the -// overridden methods have a consistent way to get to promisified fs methods -// without creating a circular dependency. 
the ctors and sync methods are kept untouched -module.exports = { ...promisify(fs), ...fsSync } diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js deleted file mode 100644 index 3a98648eca9a1..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js +++ /dev/null @@ -1,12 +0,0 @@ -module.exports = { - ...require('./fs.js'), - copyFile: require('./copy-file.js'), - cp: require('./cp/index.js'), - mkdir: require('./mkdir.js'), - mkdtemp: require('./mkdtemp.js'), - rm: require('./rm/index.js'), - withTempDir: require('./with-temp-dir.js'), - withOwner: require('./with-owner.js'), - withOwnerSync: require('./with-owner-sync.js'), - writeFile: require('./write-file.js'), -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir.js deleted file mode 100644 index 098d8d0a09ae3..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir.js +++ /dev/null @@ -1,19 +0,0 @@ -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -// extends mkdir with the ability to specify an owner of the new dir -const mkdir = async (path, opts) => { - const options = getOptions(opts, { - copy: ['mode', 'recursive'], - wrap: 'mode', - }) - - return withOwner( - path, - () => fs.mkdir(path, options), - opts - ) -} - -module.exports = mkdir diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js deleted file mode 100644 index 60b12a788de90..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js +++ /dev/null @@ -1,23 +0,0 @@ -const { dirname, sep } = require('path') - -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -const mkdtemp = async (prefix, opts) => { - const options = getOptions(opts, { - copy: ['encoding'], - wrap: 'encoding', - }) - - // mkdtemp relies on the trailing path separator to indicate if it should - // create a directory inside of the prefix. if that's the case then the root - // we infer ownership from is the prefix itself, otherwise it's the dirname - // /tmp -> /tmpABCDEF, infers from / - // /tmp/ -> /tmp/ABCDEF, infers from /tmp - const root = prefix.endsWith(sep) ? prefix : dirname(prefix) - - return withOwner(root, () => fs.mkdtemp(prefix, options), opts) -} - -module.exports = mkdtemp diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js deleted file mode 100644 index cb81fbdf8cc47..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('../fs.js') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 14.14.0 added fs.rm, which allows both the force and recursive options -const useNative = node.satisfies('>=14.14.0') - -const rm = async (path, opts) => { - const options = getOptions(opts, { - copy: ['retryDelay', 'maxRetries', 'recursive', 'force'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? 
fs.rm(path, options) - : polyfill(path, options) -} - -module.exports = rm diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js deleted file mode 100644 index a25c17483b001..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js +++ /dev/null @@ -1,239 +0,0 @@ -// this file is a modified version of the code in node core >=14.14.0 -// which is, in turn, a modified version of the rimraf module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. -// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -const errnos = require('os').constants.errno -const { join } = require('path') -const fs = require('../fs.js') - -// error codes that mean we need to remove contents -const notEmptyCodes = new Set([ - 'ENOTEMPTY', - 'EEXIST', - 'EPERM', -]) - -// error codes we can retry later -const retryCodes = new Set([ - 'EBUSY', - 'EMFILE', - 'ENFILE', - 'ENOTEMPTY', - 'EPERM', -]) - -const isWindows = process.platform === 'win32' - -const defaultOptions = { - retryDelay: 100, - maxRetries: 0, - recursive: false, - force: false, -} - -// this is drastically simplified, but should be roughly equivalent to what -// node core throws -class ERR_FS_EISDIR extends Error { - constructor (path) { - super() - this.info = { - code: 'EISDIR', - message: 'is a directory', - path, - syscall: 'rm', - errno: errnos.EISDIR, - } - this.name = 'SystemError' - this.code = 'ERR_FS_EISDIR' - this.errno = errnos.EISDIR - this.syscall = 'rm' - this.path = path - this.message = `Path is a directory: ${this.syscall} returned ` + - `${this.info.code} (is a directory) ${path}` - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } -} - -class ENOTDIR extends Error { - constructor (path) { - super() - this.name = 'Error' - this.code = 'ENOTDIR' - this.errno = errnos.ENOTDIR - this.syscall = 'rmdir' - this.path = path - this.message = `not a directory, ${this.syscall} '${this.path}'` - } - - toString () { - return `${this.name}: ${this.code}: ${this.message}` - } -} - -// force is passed separately here because we respect it for the first entry -// into rimraf only, any further calls that are spawned as a result (i.e. to -// delete content within the target) will ignore ENOENT errors -const rimraf = async (path, options, isTop = false) => { - const force = isTop ? options.force : true - const stat = await fs.lstat(path) - .catch((err) => { - // we only ignore ENOENT if we're forcing this call - if (err.code === 'ENOENT' && force) { - return - } - - if (isWindows && err.code === 'EPERM') { - return fixEPERM(path, options, err, isTop) - } - - throw err - }) - - // no stat object here means either lstat threw an ENOENT, or lstat threw - // an EPERM and the fixPERM function took care of things. 
either way, we're - // already done, so return early - if (!stat) { - return - } - - if (stat.isDirectory()) { - return rmdir(path, options, null, isTop) - } - - return fs.unlink(path) - .catch((err) => { - if (err.code === 'ENOENT' && force) { - return - } - - if (err.code === 'EISDIR') { - return rmdir(path, options, err, isTop) - } - - if (err.code === 'EPERM') { - // in windows, we handle this through fixEPERM which will also try to - // delete things again. everywhere else since deleting the target as a - // file didn't work we go ahead and try to delete it as a directory - return isWindows - ? fixEPERM(path, options, err, isTop) - : rmdir(path, options, err, isTop) - } - - throw err - }) -} - -const fixEPERM = async (path, options, originalErr, isTop) => { - const force = isTop ? options.force : true - const targetMissing = await fs.chmod(path, 0o666) - .catch((err) => { - if (err.code === 'ENOENT' && force) { - return true - } - - throw originalErr - }) - - // got an ENOENT above, return now. no file = no problem - if (targetMissing) { - return - } - - // this function does its own lstat rather than calling rimraf again to avoid - // infinite recursion for a repeating EPERM - const stat = await fs.lstat(path) - .catch((err) => { - if (err.code === 'ENOENT' && force) { - return - } - - throw originalErr - }) - - if (!stat) { - return - } - - if (stat.isDirectory()) { - return rmdir(path, options, originalErr, isTop) - } - - return fs.unlink(path) -} - -const rmdir = async (path, options, originalErr, isTop) => { - if (!options.recursive && isTop) { - throw originalErr || new ERR_FS_EISDIR(path) - } - const force = isTop ? options.force : true - - return fs.rmdir(path) - .catch(async (err) => { - // in Windows, calling rmdir on a file path will fail with ENOENT rather - // than ENOTDIR. to determine if that's what happened, we have to do - // another lstat on the path. 
if the path isn't actually gone, we throw - // away the ENOENT and replace it with our own ENOTDIR - if (isWindows && err.code === 'ENOENT') { - const stillExists = await fs.lstat(path).then(() => true, () => false) - if (stillExists) { - err = new ENOTDIR(path) - } - } - - // not there, not a problem - if (err.code === 'ENOENT' && force) { - return - } - - // we may not have originalErr if lstat tells us our target is a - // directory but that changes before we actually remove it, so - // only throw it here if it's set - if (originalErr && err.code === 'ENOTDIR') { - throw originalErr - } - - // the directory isn't empty, remove the contents and try again - if (notEmptyCodes.has(err.code)) { - const files = await fs.readdir(path) - await Promise.all(files.map((file) => { - const target = join(path, file) - return rimraf(target, options) - })) - return fs.rmdir(path) - } - - throw err - }) -} - -const rm = async (path, opts) => { - const options = { ...defaultOptions, ...opts } - let retries = 0 - - const errHandler = async (err) => { - if (retryCodes.has(err.code) && ++retries < options.maxRetries) { - const delay = retries * options.retryDelay - await promiseTimeout(delay) - return rimraf(path, options, true).catch(errHandler) - } - - throw err - } - - return rimraf(path, options, true).catch(errHandler) -} - -const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms)) - -module.exports = rm diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner-sync.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner-sync.js deleted file mode 100644 index 3597d1c810475..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner-sync.js +++ /dev/null @@ -1,21 +0,0 @@ -const getOptions = require('./common/get-options.js') -const owner = require('./common/owner-sync.js') - -const withOwnerSync = (path, fn, opts) => { - const options = getOptions(opts, { - copy: ['owner'], - }) - - const { uid, gid } = owner.validate(path, options.owner) - - const result = fn({ uid, gid }) - - owner.update(path, uid, gid) - if (typeof result === 'string') { - owner.update(result, uid, gid) - } - - return result -} - -module.exports = withOwnerSync diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner.js deleted file mode 100644 index a679102883dbb..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner.js +++ /dev/null @@ -1,21 +0,0 @@ -const getOptions = require('./common/get-options.js') -const owner = require('./common/owner.js') - -const withOwner = async (path, fn, opts) => { - const options = getOptions(opts, { - copy: ['owner'], - }) - - const { uid, gid } = await owner.validate(path, options.owner) - - const result = await fn({ uid, gid }) - - await Promise.all([ - owner.update(path, uid, gid), - typeof result === 'string' ? 
owner.update(result, uid, gid) : null, - ]) - - return result -} - -module.exports = withOwner diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js deleted file mode 100644 index 81db59dd054b4..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js +++ /dev/null @@ -1,41 +0,0 @@ -const { join, sep } = require('path') - -const getOptions = require('./common/get-options.js') -const mkdir = require('./mkdir.js') -const mkdtemp = require('./mkdtemp.js') -const rm = require('./rm/index.js') - -// create a temp directory, ensure its permissions match its parent, then call -// the supplied function passing it the path to the directory. clean up after -// the function finishes, whether it throws or not -const withTempDir = async (root, fn, opts) => { - const options = getOptions(opts, { - copy: ['tmpPrefix'], - }) - // create the directory, and fix its ownership - await mkdir(root, { recursive: true, owner: 'inherit' }) - - const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' }) - let err - let result - - try { - result = await fn(target) - } catch (_err) { - err = _err - } - - try { - await rm(target, { force: true, recursive: true }) - } catch { - // ignore errors - } - - if (err) { - throw err - } - - return result -} - -module.exports = withTempDir diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js deleted file mode 100644 index ff900571a1f28..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js +++ /dev/null @@ -1,14 +0,0 @@ -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -const writeFile = async (file, data, opts) => { - const options = getOptions(opts, { - copy: ['encoding', 'mode', 'flag', 'signal'], - wrap: 'encoding', - }) - - return withOwner(file, () => fs.writeFile(file, data, options), opts) -} - -module.exports = writeFile diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/package.json b/node_modules/node-gyp/node_modules/@npmcli/fs/package.json deleted file mode 100644 index 1512fd6e4b0ac..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/fs/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "@npmcli/fs", - "version": "2.1.2", - "description": "filesystem utilities for the npm cli", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "snap": "tap", - "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.js\"", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/fs.git" - }, - "keywords": [ - "npm", - "oss" - ], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.1" - }, - "dependencies": { - "@gar/promisify": "^1.1.3", - "semver": "^7.3.5" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/node_modules/node-gyp/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 7c20c75257b4f..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,241 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = await fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - 
size, - }) - ) - return stream -} - -module.exports.sync = readSync - -function readSync (cache, integrity, opts = {}) { - const { size } = opts - return withContentSriSync(cache, integrity, (cpath, sri) => { - const data = fs.readFileSync(cpath, { encoding: null }) - if (typeof size === 'number' && size !== data.length) { - throw sizeError(size, data.length) - } - - if (ssri.checkData(data, sri)) { - return data - } - - throw integrityError(sri, cpath) - }) -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // just stat to ensure it exists - const stat = await fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy -module.exports.copy.sync = copySync - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath, sri) => { - return fs.copyFile(cpath, dest) - }) -} - -function copySync (cache, integrity, dest) { - return withContentSriSync(cache, integrity, (cpath, sri) => { - return fs.copyFileSync(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -module.exports.hasContent.sync = hasContentSync - -function hasContentSync (cache, integrity) { - if (!integrity) { - return false - } - - return withContentSriSync(cache, integrity, (cpath, sri) => { - try { - const stat = fs.statSync(cpath) - return { size: stat.size, sri, stat } - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } - }) -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function withContentSriSync (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - let lastErr = null - for (const meta of digests) { - try { - return withContentSriSync(cache, meta, fn) - } catch (err) { - lastErr = err - } - } - throw lastErr - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index f7333053b393f..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const { hasContent } = require('./read') -const rimraf = util.promisify(require('rimraf')) - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await rimraf(contentPath(cache, content.sri)) - return true - } else { - return false - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js deleted file mode 100644 index 0e8c0f4936064..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,189 +0,0 @@ -'use strict' - -const events = require('events') -const util = require('util') - -const contentPath = require('./path') -const fixOwner = require('../util/fix-owner') -const fs = require('@npmcli/fs') -const moveFile = require('../util/move-file') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const rimraf = 
util.promisify(require('rimraf')) -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - if (algorithms && algorithms.length > 1) { - throw new Error('opts.algorithms only supports a single algorithm for now') - } - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - const tmp = await makeTmp(cache, opts) - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, sri, opts) - return { integrity: sri, size: data.length } - } finally { - if (!tmp.moved) { - await rimraf(tmp.target) - } - } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. 
- return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await rimraf(tmp.target) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(tmpTarget)) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri, opts) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - - await fixOwner.mkdirfix(cache, destDir) - await moveFile(tmp.target, destination) - tmp.moved = true - await fixOwner.chownr(cache, destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 1dc73a93f6b29..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,404 +0,0 @@ -'use strict' - -const util = require('util') -const crypto = require('crypto') -const fs = require('@npmcli/fs') -const Minipass = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const moveFile = 
require('@npmcli/move-file') -const _rimraf = require('rimraf') -const rimraf = util.promisify(_rimraf) -rimraf.sync = _rimraf.sync - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(target)) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rimraf(tmp.target) - } - } - - const write = async (tmp) => { - await fs.writeFile(tmp.target, newIndex, { flag: 'wx' }) - await fixOwner.mkdirfix(cache, path.dirname(bucket)) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - try { - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - try { - await fixOwner.mkdirfix(cache, path.dirname(bucket)) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! 
- // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. - // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. - } - return formatEntry(cache, entry) -} - -module.exports.insert.sync = insertSync - -function insertSync (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) - const stringified = JSON.stringify(entry) - fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - try { - fixOwner.chownr.sync(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.find.sync = findSync - -function findSync (cache, key) { - const bucket = bucketPath(cache, key) - try { - return bucketEntriesSync(bucket).reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf(bucket) -} - -module.exports.delete.sync = delSync - -function delSync (cache, key, opts = {}) { - if (!opts.removeFully) { - return insertSync(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf.sync(bucket) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await Promise.all(buckets.map(async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await Promise.all(subbuckets.map(async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await Promise.all(subbucketEntries.map(async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) 
- // using a Map here prevents duplicate keys from showing up - // twice, I guess? - const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - })) - })) - })) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await fs.readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -module.exports.bucketEntries.sync = bucketEntriesSync - -function bucketEntriesSync (bucket, filter) { - const data = fs.readFileSync(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data, filter) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (e) { - // Entry is corrupted! - return - } - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return fs.readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/get.js b/node_modules/node-gyp/node_modules/cacache/lib/get.js deleted file mode 100644 index 254b4ecc38b57..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,225 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -function getDataSync (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - const entry = index.find.sync(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = read.sync(cache, entry.integrity, { - integrity: integrity, - size: size, - }) - const res = { - metadata: entry.metadata, - data: data, - size: entry.size, - integrity: entry.integrity, - } - if (memoize) { - memo.put(cache, entry, res.data, opts) - } - - return res -} - -module.exports.sync = getDataSync - -function getDataByDigestSync (cache, digest, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, digest, opts) - - if (memoized && memoize !== false) { - return memoized - } - - const res = read.sync(cache, digest, { - integrity: integrity, - size: size, - }) - if (memoize) { - memo.put.byDigest(cache, digest, res, opts) - } - - return res -} -module.exports.sync.byDigest = getDataByDigestSync - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, 
size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/node-gyp/node_modules/cacache/lib/index.js b/node_modules/node-gyp/node_modules/cacache/lib/index.js deleted file mode 100644 index 1c56be68dd8fd..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.sync = get.sync -module.exports.get.sync.byDigest = get.sync.byDigest -module.exports.get.stream = get.stream 
-module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent -module.exports.get.hasContent.sync = get.hasContent.sync - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 0ff604a479c9c..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') - -const MEMOIZED = new LRU({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/put.js b/node_modules/node-gyp/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = 
putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/rm.js deleted file mode 100644 index 5f00071770b8d..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const util = require('util') - -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -function all (cache) { - memo.clearMemoized() - return rimraf(path.join(cache, '*(content-*|index-*)')) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js b/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js deleted file mode 100644 index 182fcb028f06c..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict' - -const util = require('util') - -const chownr = util.promisify(require('chownr')) -const mkdirp = require('mkdirp') -const inflight = require('promise-inflight') -const inferOwner = require('infer-owner') - -// Memoize getuid()/getgid() calls. 
-// patch process.setuid/setgid to invalidate cached value on change -const self = { uid: null, gid: null } -const getSelf = () => { - if (typeof self.uid !== 'number') { - self.uid = process.getuid() - const setuid = process.setuid - process.setuid = (uid) => { - self.uid = null - process.setuid = setuid - return process.setuid(uid) - } - } - if (typeof self.gid !== 'number') { - self.gid = process.getgid() - const setgid = process.setgid - process.setgid = (gid) => { - self.gid = null - process.setgid = setgid - return process.setgid(gid) - } - } -} - -module.exports.chownr = fixOwner - -async function fixOwner (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - const { uid, gid } = await inferOwner(cache) - - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } - - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) -} - -module.exports.chownr.sync = fixOwnerSync - -function fixOwnerSync (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - const { uid, gid } = inferOwner.sync(cache) - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - if (self.uid === uid && self.gid === gid) { - // No need to override if it's already what we used. - return - } - try { - chownr.sync( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ) - } catch (err) { - // only catch ENOENT, any other error is a problem. - if (err.code === 'ENOENT') { - return null - } - - throw err - } -} - -module.exports.mkdirfix = mkdirfix - -async function mkdirfix (cache, p, cb) { - // we have to infer the owner _before_ making the directory, even though - // we aren't going to use the results, since the cache itself might not - // exist yet. If we mkdirp it, then our current uid/gid will be assumed - // to be correct if it creates the cache folder in the process. 
- await inferOwner(cache) - try { - const made = await mkdirp(p) - if (made) { - await fixOwner(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - await fixOwner(cache, p) - return null - } - throw err - } -} - -module.exports.mkdirfix.sync = mkdirfixSync - -function mkdirfixSync (cache, p) { - try { - inferOwner.sync(cache) - const made = mkdirp.sync(p) - if (made) { - fixOwnerSync(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - fixOwnerSync(cache, p) - return null - } else { - throw err - } - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js b/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js deleted file mode 100644 index a0b40413cb56e..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') -const move = require('@npmcli/move-file') -const pinflight = require('promise-inflight') - -module.exports = moveFile - -async function moveFile (src, dest) { - const isWindows = process.platform === 'win32' - - // This isn't quite an fs.rename -- the assumption is that - // if `dest` already exists, and we get certain errors while - // trying to move it, we should just not bother. - // - // In the case of cache corruption, users will receive an - // EINTEGRITY error elsewhere, and can remove the offending - // content their own way. - // - // Note that, as the name suggests, this strictly only supports file moves. - try { - await fs.link(src, dest) - } catch (err) { - if (isWindows && err.code === 'EPERM') { - // XXX This is a really weird way to handle this situation, as it - // results in the src file being deleted even though the dest - // might not exist. Since we pretty much always write files to - // deterministic locations based on content hash, this is likely - // ok (or at worst, just ends in a future cache miss). But it would - // be worth investigating at some time in the future if this is - // really what we want to do here. - } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { - // file already exists, so whatever - } else { - throw err - } - } - try { - await Promise.all([ - fs.unlink(src), - !isWindows && fs.chmod(dest, '0444'), - ]) - } catch (e) { - return pinflight('cacache-move-file:' + dest, async () => { - await fs.stat(dest).catch((err) => { - if (err.code !== 'ENOENT') { - // Something else is wrong here. Bail bail bail - throw err - } - }) - // file doesn't already exist! 
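
The link-then-unlink trick used by moveFile above, in isolation as a sketch against plain fs/promises (error handling trimmed to the EEXIST case; the helper name is hypothetical):

    const fs = require('fs/promises')

    async function linkMove (src, dest) {
      // Hard-link instead of rename: an existing dest surfaces as EEXIST
      // rather than being clobbered, which is safe for content-addressed
      // files since equal paths imply equal contents.
      await fs.link(src, dest).catch((err) => {
        if (err.code !== 'EEXIST') {
          throw err
        }
      })
      await fs.unlink(src)
    }
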
let's try a rename -> copy fallback - // only delete if it successfully copies - return move(src, dest) - }) - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index b4437cfcbeed6..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') - -const fixOwner = require('./fix-owner') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) -} - -module.exports.fix = fixtmpdir - -function fixtmpdir (cache) { - return fixOwner(cache, path.join(cache, 'tmp')) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/node_modules/node-gyp/node_modules/cacache/lib/verify.js deleted file mode 100644 index 52692a01d192f..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const util = require('util') - -const pMap = require('p-map') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const fs = require('@npmcli/fs') -const fsm = require('fs-minipass') -const glob = util.promisify(require('glob')) -const index = require('./entry-index') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') - -const globify = pattern => pattern.split('\\').join('/') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime (cache, opts) { - return { startTime: new Date() } -} - -async function markEndTime (cache, opts) { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await fixOwner.mkdirfix(cache, cache) - // TODO - fix file permissions too - await fixOwner.chownr(cache, cache) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. 
Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rimraf it. -// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - liveContent.add(entry.integrity.toString()) - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(globify(path.join(contentDir, '**')), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await fs.stat(f) - await rimraf(f) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await fs.stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rimraf(filepath) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats, opts) { - await fs.truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
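
The mark-and-sweep core of garbageCollect() above, condensed into a sketch (the helper is hypothetical). Content paths end in <algo>/xx/yy/<rest-of-hex>, which is why the digest can be reassembled from the last three path segments and the algorithm read from the fourth-from-last:

    const ssri = require('ssri')
    const fs = require('fs/promises')

    // live: Set of integrity strings collected from index entries (the "mark")
    // files: every file found under content-vX/ (the "sweep" candidates)
    async function sweep (live, files) {
      for (const f of files) {
        const parts = f.split(/[/\\]/)
        const digest = parts.slice(-3).join('') // undo hashToSegments()
        const algo = parts[parts.length - 4]
        const sri = ssri.fromHex(digest, algo)
        if (!live.has(sri.toString())) {
          await fs.rm(f) // no index entry points here: reclaim it
        }
      }
    }
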
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await fs.stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rimraf(path.join(cache, 'tmp')) -} - -function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - try { - return fs.writeFile(verifile, `${Date.now()}`) - } finally { - fixOwner.chownr.sync(cache, verifile) - } -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE deleted file mode 100644 index de3226673c387..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2013 Julian Gruber - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js deleted file mode 100644 index 4af9ddee463f4..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js +++ /dev/null @@ -1,203 +0,0 @@ -var balanced = require('balanced-match'); - -module.exports = expandTop; - -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; - -function numeric(str) { - return parseInt(str, 10) == str - ? 
parseInt(str, 10) - : str.charCodeAt(0); -} - -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} - -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} - - -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; - - var parts = []; - var m = balanced('{', '}', str); - - if (!m) - return str.split(','); - - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); - - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } - - parts.push.apply(parts, p); - - return parts; -} - -function expandTop(str) { - if (!str) - return []; - - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. - // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } - - return expand(escapeBraces(str), true).map(unescapeBraces); -} - -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} - -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} - -function expand(str, isTop) { - var expansions = []; - - var m = balanced('{', '}', str); - if (!m) return [str]; - - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; - - if (/\$$/.test(m.pre)) { - for (var k = 0; k < post.length; k++) { - var expansion = pre+ '{' + m.body + '}' + post[k]; - expansions.push(expansion); - } - } else { - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } - - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } - - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. - var N; - - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? 
Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); - - N = []; - - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = []; - - for (var j = 0; j < n.length; j++) { - N.push.apply(N, expand(n[j], false)); - } - } - - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } - } - - return expansions; -} - diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json deleted file mode 100644 index 7097d41e39de5..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "brace-expansion", - "description": "Brace expansion as known from sh/bash", - "version": "2.0.1", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/brace-expansion.git" - }, - "homepage": "https://github.com/juliangruber/brace-expansion", - "main": "index.js", - "scripts": { - "test": "tape test/*.js", - "gentest": "bash test/generate.sh", - "bench": "matcha test/perf/bench.js" - }, - "dependencies": { - "balanced-match": "^1.0.0" - }, - "devDependencies": { - "@c4312/matcha": "^1.3.1", - "tape": "^4.6.0" - }, - "keywords": [], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT", - "testling": { - "files": "test/*.js", - "browsers": [ - "ie/8..latest", - "firefox/20..latest", - "firefox/nightly", - "chrome/25..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE deleted file mode 100644 index 39e8fe16f665a..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
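
For reference, the observable behavior of the brace-expansion module deleted above (expected outputs follow from the code and its own comments):

    const expand = require('brace-expansion')

    expand('a{b,c}d')     // => ['abd', 'acd']
    expand('a{1..3}b')    // => ['a1b', 'a2b', 'a3b']
    expand('a{01..03}b')  // => ['a01b', 'a02b', 'a03b'] (padding preserved)
    expand('x{{a,b}}y')   // => ['x{a}y', 'x{b}y']
    expand('a{2..}b')     // => ['a{2..}b'] (invalid sets pass through as-is)
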
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js deleted file mode 100644 index 61a4452f097dc..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js +++ /dev/null @@ -1,244 +0,0 @@ -exports.setopts = setopts -exports.ownProp = ownProp -exports.makeAbs = makeAbs -exports.finish = finish -exports.mark = mark -exports.isIgnored = isIgnored -exports.childrenIgnored = childrenIgnored - -function ownProp (obj, field) { - return Object.prototype.hasOwnProperty.call(obj, field) -} - -var fs = require("fs") -var path = require("path") -var minimatch = require("minimatch") -var isAbsolute = require("path").isAbsolute -var Minimatch = minimatch.Minimatch - -function alphasort (a, b) { - return a.localeCompare(b, 'en') -} - -function setupIgnores (self, options) { - self.ignore = options.ignore || [] - - if (!Array.isArray(self.ignore)) - self.ignore = [self.ignore] - - if (self.ignore.length) { - self.ignore = self.ignore.map(ignoreMap) - } -} - -// ignore patterns are always in dot:true mode. -function ignoreMap (pattern) { - var gmatcher = null - if (pattern.slice(-3) === '/**') { - var gpattern = pattern.replace(/(\/\*\*)+$/, '') - gmatcher = new Minimatch(gpattern, { dot: true }) - } - - return { - matcher: new Minimatch(pattern, { dot: true }), - gmatcher: gmatcher - } -} - -function setopts (self, pattern, options) { - if (!options) - options = {} - - // base-matching: just use globstar for that. - if (options.matchBase && -1 === pattern.indexOf("/")) { - if (options.noglobstar) { - throw new Error("base matching requires globstar") - } - pattern = "**/" + pattern - } - - self.windowsPathsNoEscape = !!options.windowsPathsNoEscape || - options.allowWindowsEscape === false - if (self.windowsPathsNoEscape) { - pattern = pattern.replace(/\\/g, '/') - } - - self.silent = !!options.silent - self.pattern = pattern - self.strict = options.strict !== false - self.realpath = !!options.realpath - self.realpathCache = options.realpathCache || Object.create(null) - self.follow = !!options.follow - self.dot = !!options.dot - self.mark = !!options.mark - self.nodir = !!options.nodir - if (self.nodir) - self.mark = true - self.sync = !!options.sync - self.nounique = !!options.nounique - self.nonull = !!options.nonull - self.nosort = !!options.nosort - self.nocase = !!options.nocase - self.stat = !!options.stat - self.noprocess = !!options.noprocess - self.absolute = !!options.absolute - self.fs = options.fs || fs - - self.maxLength = options.maxLength || Infinity - self.cache = options.cache || Object.create(null) - self.statCache = options.statCache || Object.create(null) - self.symlinks = options.symlinks || Object.create(null) - - setupIgnores(self, options) - - self.changedCwd = false - var cwd = process.cwd() - if (!ownProp(options, "cwd")) - self.cwd = path.resolve(cwd) - else { - self.cwd = path.resolve(options.cwd) - self.changedCwd = self.cwd !== cwd - } - - self.root = options.root || path.resolve(self.cwd, "/") - self.root = path.resolve(self.root) - - // TODO: is an absolute `cwd` supposed to be resolved against `root`? - // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') - self.cwdAbs = isAbsolute(self.cwd) ? 
self.cwd : makeAbs(self, self.cwd) - self.nomount = !!options.nomount - - if (process.platform === "win32") { - self.root = self.root.replace(/\\/g, "/") - self.cwd = self.cwd.replace(/\\/g, "/") - self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") - } - - // disable comments and negation in Minimatch. - // Note that they are not supported in Glob itself anyway. - options.nonegate = true - options.nocomment = true - - self.minimatch = new Minimatch(pattern, options) - self.options = self.minimatch.options -} - -function finish (self) { - var nou = self.nounique - var all = nou ? [] : Object.create(null) - - for (var i = 0, l = self.matches.length; i < l; i ++) { - var matches = self.matches[i] - if (!matches || Object.keys(matches).length === 0) { - if (self.nonull) { - // do like the shell, and spit out the literal glob - var literal = self.minimatch.globSet[i] - if (nou) - all.push(literal) - else - all[literal] = true - } - } else { - // had matches - var m = Object.keys(matches) - if (nou) - all.push.apply(all, m) - else - m.forEach(function (m) { - all[m] = true - }) - } - } - - if (!nou) - all = Object.keys(all) - - if (!self.nosort) - all = all.sort(alphasort) - - // at *some* point we statted all of these - if (self.mark) { - for (var i = 0; i < all.length; i++) { - all[i] = self._mark(all[i]) - } - if (self.nodir) { - all = all.filter(function (e) { - var notDir = !(/\/$/.test(e)) - var c = self.cache[e] || self.cache[makeAbs(self, e)] - if (notDir && c) - notDir = c !== 'DIR' && !Array.isArray(c) - return notDir - }) - } - } - - if (self.ignore.length) - all = all.filter(function(m) { - return !isIgnored(self, m) - }) - - self.found = all -} - -function mark (self, p) { - var abs = makeAbs(self, p) - var c = self.cache[abs] - var m = p - if (c) { - var isDir = c === 'DIR' || Array.isArray(c) - var slash = p.slice(-1) === '/' - - if (isDir && !slash) - m += '/' - else if (!isDir && slash) - m = m.slice(0, -1) - - if (m !== p) { - var mabs = makeAbs(self, m) - self.statCache[mabs] = self.statCache[abs] - self.cache[mabs] = self.cache[abs] - } - } - - return m -} - -// lotta situps... -function makeAbs (self, f) { - var abs = f - if (f.charAt(0) === '/') { - abs = path.join(self.root, f) - } else if (isAbsolute(f) || f === '') { - abs = f - } else if (self.changedCwd) { - abs = path.resolve(self.cwd, f) - } else { - abs = path.resolve(f) - } - - if (process.platform === 'win32') - abs = abs.replace(/\\/g, '/') - - return abs -} - - -// Return true, if pattern ends with globstar '**', for the accompanying parent directory. -// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents -function isIgnored (self, path) { - if (!self.ignore.length) - return false - - return self.ignore.some(function(item) { - return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) - }) -} - -function childrenIgnored (self, path) { - if (!self.ignore.length) - return false - - return self.ignore.some(function(item) { - return !!(item.gmatcher && item.gmatcher.match(path)) - }) -} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js deleted file mode 100644 index 2112a957dc501..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js +++ /dev/null @@ -1,790 +0,0 @@ -// Approach: -// -// 1. Get the minimatch set -// 2. For each pattern in the set, PROCESS(pattern, false) -// 3. 
Store matches per-set, then uniq them -// -// PROCESS(pattern, inGlobStar) -// Get the first [n] items from pattern that are all strings -// Join these together. This is PREFIX. -// If there is no more remaining, then stat(PREFIX) and -// add to matches if it succeeds. END. -// -// If inGlobStar and PREFIX is symlink and points to dir -// set ENTRIES = [] -// else readdir(PREFIX) as ENTRIES -// If fail, END -// -// with ENTRIES -// If pattern[n] is GLOBSTAR -// // handle the case where the globstar match is empty -// // by pruning it out, and testing the resulting pattern -// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) -// // handle other cases. -// for ENTRY in ENTRIES (not dotfiles) -// // attach globstar + tail onto the entry -// // Mark that this entry is a globstar match -// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) -// -// else // not globstar -// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) -// Test ENTRY against pattern[n] -// If fails, continue -// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) -// -// Caveat: -// Cache all stats and readdirs results to minimize syscall. Since all -// we ever care about is existence and directory-ness, we can just keep -// `true` for files, and [children,...] for directories, or `false` for -// things that don't exist. - -module.exports = glob - -var rp = require('fs.realpath') -var minimatch = require('minimatch') -var Minimatch = minimatch.Minimatch -var inherits = require('inherits') -var EE = require('events').EventEmitter -var path = require('path') -var assert = require('assert') -var isAbsolute = require('path').isAbsolute -var globSync = require('./sync.js') -var common = require('./common.js') -var setopts = common.setopts -var ownProp = common.ownProp -var inflight = require('inflight') -var util = require('util') -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -var once = require('once') - -function glob (pattern, options, cb) { - if (typeof options === 'function') cb = options, options = {} - if (!options) options = {} - - if (options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return globSync(pattern, options) - } - - return new Glob(pattern, options, cb) -} - -glob.sync = globSync -var GlobSync = glob.GlobSync = globSync.GlobSync - -// old api surface -glob.glob = glob - -function extend (origin, add) { - if (add === null || typeof add !== 'object') { - return origin - } - - var keys = Object.keys(add) - var i = keys.length - while (i--) { - origin[keys[i]] = add[keys[i]] - } - return origin -} - -glob.hasMagic = function (pattern, options_) { - var options = extend({}, options_) - options.noprocess = true - - var g = new Glob(pattern, options) - var set = g.minimatch.set - - if (!pattern) - return false - - if (set.length > 1) - return true - - for (var j = 0; j < set[0].length; j++) { - if (typeof set[0][j] !== 'string') - return true - } - - return false -} - -glob.Glob = Glob -inherits(Glob, EE) -function Glob (pattern, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - - if (options && options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return new GlobSync(pattern, options) - } - - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb) - - setopts(this, pattern, options) - this._didRealPath = false - - // process each pattern in the minimatch set - var n = this.minimatch.set.length - - // The matches are stored as {: true,...} so 
that - // duplicates are automagically pruned. - // Later, we do an Object.keys() on these. - // Keep them as a list so we can fill in when nonull is set. - this.matches = new Array(n) - - if (typeof cb === 'function') { - cb = once(cb) - this.on('error', cb) - this.on('end', function (matches) { - cb(null, matches) - }) - } - - var self = this - this._processing = 0 - - this._emitQueue = [] - this._processQueue = [] - this.paused = false - - if (this.noprocess) - return this - - if (n === 0) - return done() - - var sync = true - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false, done) - } - sync = false - - function done () { - --self._processing - if (self._processing <= 0) { - if (sync) { - process.nextTick(function () { - self._finish() - }) - } else { - self._finish() - } - } - } -} - -Glob.prototype._finish = function () { - assert(this instanceof Glob) - if (this.aborted) - return - - if (this.realpath && !this._didRealpath) - return this._realpath() - - common.finish(this) - this.emit('end', this.found) -} - -Glob.prototype._realpath = function () { - if (this._didRealpath) - return - - this._didRealpath = true - - var n = this.matches.length - if (n === 0) - return this._finish() - - var self = this - for (var i = 0; i < this.matches.length; i++) - this._realpathSet(i, next) - - function next () { - if (--n === 0) - self._finish() - } -} - -Glob.prototype._realpathSet = function (index, cb) { - var matchset = this.matches[index] - if (!matchset) - return cb() - - var found = Object.keys(matchset) - var self = this - var n = found.length - - if (n === 0) - return cb() - - var set = this.matches[index] = Object.create(null) - found.forEach(function (p, i) { - // If there's a problem with the stat, then it means that - // one or more of the links in the realpath couldn't be - // resolved. just return the abs value in that case. - p = self._makeAbs(p) - rp.realpath(p, self.realpathCache, function (er, real) { - if (!er) - set[real] = true - else if (er.syscall === 'stat') - set[p] = true - else - self.emit('error', er) // srsly wtf right here - - if (--n === 0) { - self.matches[index] = set - cb() - } - }) - }) -} - -Glob.prototype._mark = function (p) { - return common.mark(this, p) -} - -Glob.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} - -Glob.prototype.abort = function () { - this.aborted = true - this.emit('abort') -} - -Glob.prototype.pause = function () { - if (!this.paused) { - this.paused = true - this.emit('pause') - } -} - -Glob.prototype.resume = function () { - if (this.paused) { - this.emit('resume') - this.paused = false - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0) - this._emitQueue.length = 0 - for (var i = 0; i < eq.length; i ++) { - var e = eq[i] - this._emitMatch(e[0], e[1]) - } - } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0) - this._processQueue.length = 0 - for (var i = 0; i < pq.length; i ++) { - var p = pq[i] - this._processing-- - this._process(p[0], p[1], p[2], p[3]) - } - } - } -} - -Glob.prototype._process = function (pattern, index, inGlobStar, cb) { - assert(this instanceof Glob) - assert(typeof cb === 'function') - - if (this.aborted) - return - - this._processing++ - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]) - return - } - - //console.error('PROCESS %d', this._processing, pattern) - - // Get the first [n] parts of pattern that are all strings. 
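    // Illustration (hypothetical pattern): minimatch parses 'src/lib/*.js'
    // into the set entry ['src', 'lib', <compiled '*.js' matcher>], so the
    // loop below stops at n = 2, prefix becomes 'src/lib', and remain holds
    // only the compiled '*.js' part to test against readdir() entries.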
- var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. - - // see if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index, cb) - return - - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break - - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } - - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix - - var abs = this._makeAbs(read) - - //if ignored, skip _processing - if (childrenIgnored(this, read)) - return cb() - - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) -} - -Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - -Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return cb() - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) - - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return cb() - - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. - - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return cb() - } - - // now test all matched entries as stand-ins for that part - // of the pattern. 
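    // Illustration (hypothetical values): with remain = [<'l*' part>,
    // <'*.js' part>] and readdir entries ['lib', 'lab', 'README'],
    // matchedEntries is ['lib', 'lab']; remain.shift() drops the 'l*' part
    // and _process() recurses on ['lib', <'*.js' part>] and
    // ['lab', <'*.js' part>].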
- remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - this._process([e].concat(remain), index, inGlobStar, cb) - } - cb() -} - -Glob.prototype._emitMatch = function (index, e) { - if (this.aborted) - return - - if (isIgnored(this, e)) - return - - if (this.paused) { - this._emitQueue.push([index, e]) - return - } - - var abs = isAbsolute(e) ? e : this._makeAbs(e) - - if (this.mark) - e = this._mark(e) - - if (this.absolute) - e = abs - - if (this.matches[index][e]) - return - - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } - - this.matches[index][e] = true - - var st = this.statCache[abs] - if (st) - this.emit('stat', e, st) - - this.emit('match', e) -} - -Glob.prototype._readdirInGlobStar = function (abs, cb) { - if (this.aborted) - return - - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false, cb) - - var lstatkey = 'lstat\0' + abs - var self = this - var lstatcb = inflight(lstatkey, lstatcb_) - - if (lstatcb) - self.fs.lstat(abs, lstatcb) - - function lstatcb_ (er, lstat) { - if (er && er.code === 'ENOENT') - return cb() - - var isSym = lstat && lstat.isSymbolicLink() - self.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) { - self.cache[abs] = 'FILE' - cb() - } else - self._readdir(abs, false, cb) - } -} - -Glob.prototype._readdir = function (abs, inGlobStar, cb) { - if (this.aborted) - return - - cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) - if (!cb) - return - - //console.error('RD %j %j', +inGlobStar, abs) - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return cb() - - if (Array.isArray(c)) - return cb(null, c) - } - - var self = this - self.fs.readdir(abs, readdirCb(this, abs, cb)) -} - -function readdirCb (self, abs, cb) { - return function (er, entries) { - if (er) - self._readdirError(abs, er, cb) - else - self._readdirEntries(abs, entries, cb) - } -} - -Glob.prototype._readdirEntries = function (abs, entries, cb) { - if (this.aborted) - return - - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } - } - - this.cache[abs] = entries - return cb(null, entries) -} - -Glob.prototype._readdirError = function (f, er, cb) { - if (this.aborted) - return - - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. 
- var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - this.emit('error', error) - this.abort() - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) { - this.emit('error', er) - // If the error is handled, then we abort - // if not, we threw out of here - this.abort() - } - if (!this.silent) - console.error('glob error', er) - break - } - - return cb() -} - -Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - - -Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - //console.error('pgs2', prefix, remain[0], entries) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return cb() - - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false, cb) - - var isSym = this.symlinks[abs] - var len = entries.length - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return cb() - - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue - - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true, cb) - - var below = gspref.concat(entries[i], remain) - this._process(below, index, true, cb) - } - - cb() -} - -Glob.prototype._processSimple = function (prefix, index, cb) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? 
- var self = this - this._stat(prefix, function (er, exists) { - self._processSimple2(prefix, index, er, exists, cb) - }) -} -Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { - - //console.error('ps2', prefix, exists) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return cb() - - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } - - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) - cb() -} - -// Returns either 'DIR', 'FILE', or false -Glob.prototype._stat = function (f, cb) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return cb() - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return cb(null, c) - - if (needDir && c === 'FILE') - return cb() - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (stat !== undefined) { - if (stat === false) - return cb(null, stat) - else { - var type = stat.isDirectory() ? 'DIR' : 'FILE' - if (needDir && type === 'FILE') - return cb() - else - return cb(null, type, stat) - } - } - - var self = this - var statcb = inflight('stat\0' + abs, lstatcb_) - if (statcb) - self.fs.lstat(abs, statcb) - - function lstatcb_ (er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - // If it's a symlink, then treat it as the target, unless - // the target does not exist, then treat it as a file. - return self.fs.stat(abs, function (er, stat) { - if (er) - self._stat2(f, abs, null, lstat, cb) - else - self._stat2(f, abs, er, stat, cb) - }) - } else { - self._stat2(f, abs, er, lstat, cb) - } - } -} - -Glob.prototype._stat2 = function (f, abs, er, stat, cb) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return cb() - } - - var needDir = f.slice(-1) === '/' - this.statCache[abs] = stat - - if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) - return cb(null, false, stat) - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return cb() - - return cb(null, c, stat) -} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json deleted file mode 100644 index ca0fd916211b5..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "name": "glob", - "description": "a little globber", - "version": "8.1.0", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-glob.git" - }, - "main": "glob.js", - "files": [ - "glob.js", - "sync.js", - "common.js" - ], - "engines": { - "node": ">=12" - }, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^5.0.1", - "once": "^1.3.0" - }, - "devDependencies": { - "memfs": "^3.2.0", - "mkdirp": "0", - "rimraf": "^2.2.8", - "tap": "^16.0.1", - "tick": "0.0.6" - }, - "tap": { - "before": "test/00-setup.js", - "after": "test/zz-cleanup.js", - "statements": 90, - "branches": 90, - "functions": 90, - "lines": 90, - "jobs": 1 - }, - "scripts": { - "prepublish": "npm run benchclean", - "profclean": "rm -f v8.log profile.txt", - "test": "tap", - "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", - "bench": "bash benchmark.sh", - "prof": "bash prof.sh && cat profile.txt", - "benchclean": "node benchclean.js" - }, - "license": "ISC", - "funding": { - "url": "https://github.com/sponsors/isaacs" - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js deleted file mode 100644 index af4600dd59508..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js +++ /dev/null @@ -1,486 +0,0 @@ -module.exports = globSync -globSync.GlobSync = GlobSync - -var rp = require('fs.realpath') -var minimatch = require('minimatch') -var Minimatch = minimatch.Minimatch -var Glob = require('./glob.js').Glob -var util = require('util') -var path = require('path') -var assert = require('assert') -var isAbsolute = require('path').isAbsolute -var common = require('./common.js') -var setopts = common.setopts -var ownProp = common.ownProp -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -function globSync (pattern, options) { - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - return new GlobSync(pattern, options).found -} - -function GlobSync (pattern, options) { - if (!pattern) - throw new Error('must provide pattern') - - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options) - - setopts(this, pattern, options) - - if (this.noprocess) - return this - - var n = this.minimatch.set.length - this.matches = new Array(n) - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false) - } - this._finish() -} - -GlobSync.prototype._finish = function () { - assert.ok(this instanceof GlobSync) - if (this.realpath) { - var self = this - this.matches.forEach(function (matchset, index) { - var set = self.matches[index] = Object.create(null) - for (var p in matchset) { - try { - p = self._makeAbs(p) - var real = rp.realpathSync(p, self.realpathCache) - set[real] = true - } catch (er) { - if (er.syscall === 'stat') - set[self._makeAbs(p)] = true - else - throw er - } - } - }) - } - common.finish(this) -} - - -GlobSync.prototype._process = function (pattern, index, inGlobStar) { - assert.ok(this instanceof GlobSync) - - // Get the first [n] parts of pattern that are all strings. 
- var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. - - // See if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index) - return - - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break - - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } - - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix - - var abs = this._makeAbs(read) - - //if ignored, skip processing - if (childrenIgnored(this, read)) - return - - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar) -} - - -GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar) - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return - - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. - - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix.slice(-1) !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return - } - - // now test all matched entries as stand-ins for that part - // of the pattern. 
- remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) - newPattern = [prefix, e] - else - newPattern = [e] - this._process(newPattern.concat(remain), index, inGlobStar) - } -} - - -GlobSync.prototype._emitMatch = function (index, e) { - if (isIgnored(this, e)) - return - - var abs = this._makeAbs(e) - - if (this.mark) - e = this._mark(e) - - if (this.absolute) { - e = abs - } - - if (this.matches[index][e]) - return - - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } - - this.matches[index][e] = true - - if (this.stat) - this._stat(e) -} - - -GlobSync.prototype._readdirInGlobStar = function (abs) { - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false) - - var entries - var lstat - var stat - try { - lstat = this.fs.lstatSync(abs) - } catch (er) { - if (er.code === 'ENOENT') { - // lstat failed, doesn't exist - return null - } - } - - var isSym = lstat && lstat.isSymbolicLink() - this.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = 'FILE' - else - entries = this._readdir(abs, false) - - return entries -} - -GlobSync.prototype._readdir = function (abs, inGlobStar) { - var entries - - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return null - - if (Array.isArray(c)) - return c - } - - try { - return this._readdirEntries(abs, this.fs.readdirSync(abs)) - } catch (er) { - this._readdirError(abs, er) - return null - } -} - -GlobSync.prototype._readdirEntries = function (abs, entries) { - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } - } - - this.cache[abs] = entries - - // mark and cache dir-ness - return entries -} - -GlobSync.prototype._readdirError = function (f, er) { - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - throw error - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) - throw er - if (!this.silent) - console.error('glob error', er) - break - } -} - -GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { - - var entries = this._readdir(abs, inGlobStar) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return - - // test without the globstar, and with every child both below - // and replacing the globstar. 
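    // Illustration (hypothetical values): for remain = [GLOBSTAR, '*.js'],
    // prefix = 'a', and entries ['b', 'c.js']:
    //   noGlobStar -> ['a', '*.js']                 (the '**' matched nothing)
    //   instead    -> ['a', 'b', '*.js']            and ['a', 'c.js', '*.js']
    //   below      -> ['a', 'b', GLOBSTAR, '*.js']  and ['a', 'c.js', GLOBSTAR, '*.js']
    // which is how '**' comes to match zero or more directory levels.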
- var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false) - - var len = entries.length - var isSym = this.symlinks[abs] - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return - - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue - - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true) - - var below = gspref.concat(entries[i], remain) - this._process(below, index, true) - } -} - -GlobSync.prototype._processSimple = function (prefix, index) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var exists = this._stat(prefix) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return - - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } - - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) -} - -// Returns either 'DIR', 'FILE', or false -GlobSync.prototype._stat = function (f) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return false - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return c - - if (needDir && c === 'FILE') - return false - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (!stat) { - var lstat - try { - lstat = this.fs.lstatSync(abs) - } catch (er) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return false - } - } - - if (lstat && lstat.isSymbolicLink()) { - try { - stat = this.fs.statSync(abs) - } catch (er) { - stat = lstat - } - } else { - stat = lstat - } - } - - this.statCache[abs] = stat - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return false - - return c -} - -GlobSync.prototype._mark = function (p) { - return common.mark(this, p) -} - -GlobSync.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE deleted file mode 100644 index 1493534e60dce..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js deleted file mode 100644 index ffe453d9e0557..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js +++ /dev/null @@ -1,4 +0,0 @@ -const isWindows = typeof process === 'object' && - process && - process.platform === 'win32' -module.exports = isWindows ? { sep: '\\' } : { sep: '/' } diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js deleted file mode 100644 index 6c8bfc35181c6..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js +++ /dev/null @@ -1,944 +0,0 @@ -const minimatch = module.exports = (p, pattern, options = {}) => { - assertValidPattern(pattern) - - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } - - return new Minimatch(pattern, options).match(p) -} - -module.exports = minimatch - -const path = require('./lib/path.js') -minimatch.sep = path.sep - -const GLOBSTAR = Symbol('globstar **') -minimatch.GLOBSTAR = GLOBSTAR -const expand = require('brace-expansion') - -const plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } -} - -// any single thing other than / -// don't need to escape / when using new RegExp() -const qmark = '[^/]' - -// * => any number of characters -const star = qmark + '*?' - -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' - -// "abc" -> { a:true, b:true, c:true } -const charSet = s => s.split('').reduce((set, c) => { - set[c] = true - return set -}, {}) - -// characters that need to be escaped in RegExp. -const reSpecials = charSet('().*{}+?[]^$\\!') - -// characters that indicate we have to add the pattern start -const addPatternStartSet = charSet('[.(') - -// normalizes slashes. 
-const slashSplit = /\/+/ - -minimatch.filter = (pattern, options = {}) => - (p, i, list) => minimatch(p, pattern, options) - -const ext = (a, b = {}) => { - const t = {} - Object.keys(a).forEach(k => t[k] = a[k]) - Object.keys(b).forEach(k => t[k] = b[k]) - return t -} - -minimatch.defaults = def => { - if (!def || typeof def !== 'object' || !Object.keys(def).length) { - return minimatch - } - - const orig = minimatch - - const m = (p, pattern, options) => orig(p, pattern, ext(def, options)) - m.Minimatch = class Minimatch extends orig.Minimatch { - constructor (pattern, options) { - super(pattern, ext(def, options)) - } - } - m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch - m.filter = (pattern, options) => orig.filter(pattern, ext(def, options)) - m.defaults = options => orig.defaults(ext(def, options)) - m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options)) - m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options)) - m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options)) - - return m -} - - - - - -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options) - -const braceExpand = (pattern, options = {}) => { - assertValidPattern(pattern) - - // Thanks to Yeting Li for - // improving this regexp to avoid a ReDOS vulnerability. - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - // shortcut. no need to expand. - return [pattern] - } - - return expand(pattern) -} - -const MAX_PATTERN_LENGTH = 1024 * 64 -const assertValidPattern = pattern => { - if (typeof pattern !== 'string') { - throw new TypeError('invalid pattern') - } - - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError('pattern is too long') - } -} - -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. 
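The two comment blocks above fully determine the observable behavior: braces expand before any glob matching, and '**' is only special when it is an entire path portion. A minimal usage sketch against the minimatch 5.x API being removed here (illustrative paths, standalone require assumed):

const minimatch = require('minimatch')

// brace expansion runs first, and invalid sets pass through untouched
minimatch.braceExpand('a{b,c}d')   // => ['abd', 'acd']
minimatch.braceExpand('a{2..}b')   // => ['a{2..}b']

// '**' spans any number of segments, but only as a whole path portion
minimatch('a/x/y/c', 'a/**/c')     // => true
minimatch('a/c', 'a/**/c')         // => true  (zero segments)
minimatch('axy/c', 'a**/c')        // => true  ('a**' collapses to 'a*')
minimatch('a/x/y/c', 'a/**/c', { noglobstar: true })  // => false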
-const SUBPARSE = Symbol('subparse') - -minimatch.makeRe = (pattern, options) => - new Minimatch(pattern, options || {}).makeRe() - -minimatch.match = (list, pattern, options = {}) => { - const mm = new Minimatch(pattern, options) - list = list.filter(f => mm.match(f)) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list -} - -// replace stuff like \* with * -const globUnescape = s => s.replace(/\\(.)/g, '$1') -const charUnescape = s => s.replace(/\\([^-\]])/g, '$1') -const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') -const braExpEscape = s => s.replace(/[[\]\\]/g, '\\$&') - -class Minimatch { - constructor (pattern, options) { - assertValidPattern(pattern) - - if (!options) options = {} - - this.options = options - this.set = [] - this.pattern = pattern - this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || - options.allowWindowsEscape === false - if (this.windowsPathsNoEscape) { - this.pattern = this.pattern.replace(/\\/g, '/') - } - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - this.partial = !!options.partial - - // make the set of regexps etc. - this.make() - } - - debug () {} - - make () { - const pattern = this.pattern - const options = this.options - - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } - - // step 1: figure out negation, etc. - this.parseNegate() - - // step 2: expand braces - let set = this.globSet = this.braceExpand() - - if (options.debug) this.debug = (...args) => console.error(...args) - - this.debug(this.pattern, set) - - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(s => s.split(slashSplit)) - - this.debug(this.pattern, set) - - // glob --> regexps - set = set.map((s, si, set) => s.map(this.parse, this)) - - this.debug(this.pattern, set) - - // filter out everything that didn't compile properly. - set = set.filter(s => s.indexOf(false) === -1) - - this.debug(this.pattern, set) - - this.set = set - } - - parseNegate () { - if (this.options.nonegate) return - - const pattern = this.pattern - let negate = false - let negateOffset = 0 - - for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) { - negate = !negate - negateOffset++ - } - - if (negateOffset) this.pattern = pattern.slice(negateOffset) - this.negate = negate - } - - // set partial to true to test if, for example, - // "/a/b" matches the start of "/*/b/*/d" - // Partial means, if you run out of file before you run - // out of pattern, then that's fine, as long as all - // the parts match. - matchOne (file, pattern, partial) { - var options = this.options - - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) - - this.debug('matchOne', file.length, pattern.length) - - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] - - this.debug(pattern, p, f) - - // should be impossible. - // some invalid regexp stuff in the set. 
- /* istanbul ignore if */ - if (p === false) return false - - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) - - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' || - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } - - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] - - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) - - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' || - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } - - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } - - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - /* istanbul ignore if */ - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } - - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - hit = f === p - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } - - if (!hit) return false - } - - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. 
- return partial - } else /* istanbul ignore else */ if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - return (fi === fl - 1) && (file[fi] === '') - } - - // should be unreachable. - /* istanbul ignore next */ - throw new Error('wtf?') - } - - braceExpand () { - return braceExpand(this.pattern, this.options) - } - - parse (pattern, isSub) { - assertValidPattern(pattern) - - const options = this.options - - // shortcuts - if (pattern === '**') { - if (!options.noglobstar) - return GLOBSTAR - else - pattern = '*' - } - if (pattern === '') return '' - - let re = '' - let hasMagic = false - let escaping = false - // ? => one single character - const patternListStack = [] - const negativeLists = [] - let stateChar - let inClass = false - let reClassStart = -1 - let classStart = -1 - let cs - let pl - let sp - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. However, if the pattern - // starts with ., then traversal patterns can match. - let dotTravAllowed = pattern.charAt(0) === '.' - let dotFileAllowed = options.dot || dotTravAllowed - const patternStart = () => - dotTravAllowed - ? '' - : dotFileAllowed - ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' - : '(?!\\.)' - const subPatternStart = (p) => - p.charAt(0) === '.' - ? '' - : options.dot - ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' - : '(?!\\.)' - - - const clearStateChar = () => { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - this.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } - - for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) - - // skip over any that are escaped. - if (escaping) { - /* istanbul ignore next - completely not allowed, even escaped. */ - if (c === '/') { - return false - } - - if (reSpecials[c]) { - re += '\\' - } - re += c - escaping = false - continue - } - - switch (c) { - /* istanbul ignore next */ - case '/': { - // Should already be path-split by now. - return false - } - - case '\\': - if (inClass && pattern.charAt(i + 1) === '-') { - re += c - continue - } - - clearStateChar() - escaping = true - continue - - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) - - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } - - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - this.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. 
- if (options.noext) clearStateChar() - continue - - case '(': { - if (inClass) { - re += '(' - continue - } - - if (!stateChar) { - re += '\\(' - continue - } - - const plEntry = { - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close, - } - this.debug(this.pattern, '\t', plEntry) - patternListStack.push(plEntry) - // negation is (?:(?!(?:js)(?:))[^/]*) - re += plEntry.open - // next entry starts with a dot maybe? - if (plEntry.start === 0 && plEntry.type !== '!') { - dotTravAllowed = true - re += subPatternStart(pattern.slice(i + 1)) - } - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue - } - - case ')': { - const plEntry = patternListStack[patternListStack.length - 1] - if (inClass || !plEntry) { - re += '\\)' - continue - } - patternListStack.pop() - - // closing an extglob - clearStateChar() - hasMagic = true - pl = plEntry - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(Object.assign(pl, { reEnd: re.length })) - } - continue - } - - case '|': { - const plEntry = patternListStack[patternListStack.length - 1] - if (inClass || !plEntry) { - re += '\\|' - continue - } - - clearStateChar() - re += '|' - // next subpattern can start with a dot? - if (plEntry.start === 0 && plEntry.type !== '!') { - dotTravAllowed = true - re += subPatternStart(pattern.slice(i + 1)) - } - continue - } - - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() - - if (inClass) { - re += '\\' + c - continue - } - - inClass = true - classStart = i - reClassStart = re.length - re += c - continue - - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - continue - } - - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + braExpEscape(charUnescape(cs)) + ']') - // looks good, finish up the class. - re += c - } catch (er) { - // out of order ranges in JS are errors, but in glob syntax, - // they're just a range that matches nothing. - re = re.substring(0, reClassStart) + '(?:$.)' // match nothing ever - } - hasMagic = true - inClass = false - continue - - default: - // swallow any state char that wasn't consumed - clearStateChar() - - if (reSpecials[c] && !(c === '^' && inClass)) { - re += '\\' - } - - re += c - break - - } // switch - } // for - - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.slice(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substring(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } - - // handle the case where we had a +( thing at the *end* - // of the pattern. 
- // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - let tail - tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => { - /* istanbul ignore else - should already be done */ - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } - - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) - - this.debug('tail=%j\n %s', tail, tail, pl, re) - const t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type - - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } - - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } - - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - const addPatternStart = addPatternStartSet[re.charAt(0)] - - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (let n = negativeLists.length - 1; n > -1; n--) { - const nl = negativeLists[n] - - const nlBefore = re.slice(0, nl.reStart) - const nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - let nlAfter = re.slice(nl.reEnd) - const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter - - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. - const closeParensBefore = nlBefore.split(')').length - const openParensBefore = nlBefore.split('(').length - closeParensBefore - let cleanAfter = nlAfter - for (let i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter - - const dollar = nlAfter === '' && isSub !== SUBPARSE ? '(?:$|\\/)' : '' - - re = nlBefore + nlFirst + nlAfter + dollar + nlLast - } - - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } - - if (addPatternStart) { - re = patternStart() + re - } - - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } - - // if it's nocase, and the lcase/uppercase don't match, it's magic - if (options.nocase && !hasMagic) { - hasMagic = pattern.toUpperCase() !== pattern.toLowerCase() - } - - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } - - const flags = options.nocase ? 
'i' : '' - try { - return Object.assign(new RegExp('^' + re + '$', flags), { - _glob: pattern, - _src: re, - }) - } catch (er) /* istanbul ignore next - should be impossible */ { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') - } - } - - makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - const set = this.set - - if (!set.length) { - this.regexp = false - return this.regexp - } - const options = this.options - - const twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - const flags = options.nocase ? 'i' : '' - - // coalesce globstars and regexpify non-globstar patterns - // if it's the only item, then we just do one twoStar - // if it's the first, and there are more, prepend (\/|twoStar\/)? to next - // if it's the last, append (\/twoStar|) to previous - // if it's in the middle, append (\/|\/twoStar\/) to previous - // then filter out GLOBSTAR symbols - let re = set.map(pattern => { - pattern = pattern.map(p => - typeof p === 'string' ? regExpEscape(p) - : p === GLOBSTAR ? GLOBSTAR - : p._src - ).reduce((set, p) => { - if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) { - set.push(p) - } - return set - }, []) - pattern.forEach((p, i) => { - if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) { - return - } - if (i === 0) { - if (pattern.length > 1) { - pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1] - } else { - pattern[i] = twoStar - } - } else if (i === pattern.length - 1) { - pattern[i-1] += '(?:\\\/|' + twoStar + ')?' - } else { - pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1] - pattern[i+1] = GLOBSTAR - } - }) - return pattern.filter(p => p !== GLOBSTAR).join('/') - }).join('|') - - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' - - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' - - try { - this.regexp = new RegExp(re, flags) - } catch (ex) /* istanbul ignore next - should be impossible */ { - this.regexp = false - } - return this.regexp - } - - match (f, partial = this.partial) { - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true - - const options = this.options - - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } - - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) - - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. 
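To make the one-hit rule above concrete, a short sketch (hypothetical paths; same 5.x entry point as above):

// '{a,b}/*.js' expands to two rows in this.set; any single row matching wins
minimatch('b/x.js', '{a,b}/*.js')    // => true
// with a leading '!', a single hit flips the overall result to failure
minimatch('b/x.js', '!{a,b}/*.js')   // => false
minimatch('c/x.js', '!{a,b}/*.js')   // => true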
- - const set = this.set - this.debug(this.pattern, 'set', set) - - // Find the basename of the path by looking for the last non-empty segment - let filename - for (let i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } - - for (let i = 0; i < set.length; i++) { - const pattern = set[i] - let file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - const hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } - } - - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate - } - - static defaults (def) { - return minimatch.defaults(def).Minimatch - } -} - -minimatch.Minimatch = Minimatch diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json deleted file mode 100644 index c8809dbb3119d..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "author": "Isaac Z. Schlueter (http://blog.izs.me)", - "name": "minimatch", - "description": "a glob matcher in javascript", - "publishConfig": { - "tag": "legacy-v5" - }, - "version": "5.1.6", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/minimatch.git" - }, - "main": "minimatch.js", - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "engines": { - "node": ">=10" - }, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "devDependencies": { - "tap": "^16.3.2" - }, - "license": "ISC", - "files": [ - "minimatch.js", - "lib" - ] -} diff --git a/node_modules/node-gyp/node_modules/cacache/package.json b/node_modules/node-gyp/node_modules/cacache/package.json deleted file mode 100644 index 7dbd407d8fccf..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/package.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "name": "cacache", - "version": "16.1.3", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.js\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^2.1.0", - "@npmcli/move-file": "^2.0.0", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^8.0.1", - "infer-owner": "^1.0.4", - "lru-cache": "^7.7.1", - "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-flush": 
"^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^9.0.0", - "tar": "^6.1.11", - "unique-filename": "^2.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "3.5.0" - }, - "author": "GitHub Inc." -} diff --git a/node_modules/node-gyp/node_modules/fs-minipass/LICENSE b/node_modules/node-gyp/node_modules/fs-minipass/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/node-gyp/node_modules/fs-minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/fs-minipass/index.js b/node_modules/node-gyp/node_modules/fs-minipass/index.js deleted file mode 100644 index 9b0779c80c55e..0000000000000 --- a/node_modules/node-gyp/node_modules/fs-minipass/index.js +++ /dev/null @@ -1,422 +0,0 @@ -'use strict' -const MiniPass = require('minipass') -const EE = require('events').EventEmitter -const fs = require('fs') - -let writev = fs.writev -/* istanbul ignore next */ -if (!writev) { - // This entire block can be removed if support for earlier than Node.js - // 12.9.0 is not needed. 
- const binding = process.binding('fs') - const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback - - writev = (fd, iovec, pos, cb) => { - const done = (er, bw) => cb(er, bw, iovec) - const req = new FSReqWrap() - req.oncomplete = done - binding.writeBuffers(fd, iovec, pos, req) - } -} - -const _autoClose = Symbol('_autoClose') -const _close = Symbol('_close') -const _ended = Symbol('_ended') -const _fd = Symbol('_fd') -const _finished = Symbol('_finished') -const _flags = Symbol('_flags') -const _flush = Symbol('_flush') -const _handleChunk = Symbol('_handleChunk') -const _makeBuf = Symbol('_makeBuf') -const _mode = Symbol('_mode') -const _needDrain = Symbol('_needDrain') -const _onerror = Symbol('_onerror') -const _onopen = Symbol('_onopen') -const _onread = Symbol('_onread') -const _onwrite = Symbol('_onwrite') -const _open = Symbol('_open') -const _path = Symbol('_path') -const _pos = Symbol('_pos') -const _queue = Symbol('_queue') -const _read = Symbol('_read') -const _readSize = Symbol('_readSize') -const _reading = Symbol('_reading') -const _remain = Symbol('_remain') -const _size = Symbol('_size') -const _write = Symbol('_write') -const _writing = Symbol('_writing') -const _defaultFlag = Symbol('_defaultFlag') -const _errored = Symbol('_errored') - -class ReadStream extends MiniPass { - constructor (path, opt) { - opt = opt || {} - super(opt) - - this.readable = true - this.writable = false - - if (typeof path !== 'string') - throw new TypeError('path must be a string') - - this[_errored] = false - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_path] = path - this[_readSize] = opt.readSize || 16*1024*1024 - this[_reading] = false - this[_size] = typeof opt.size === 'number' ? opt.size : Infinity - this[_remain] = this[_size] - this[_autoClose] = typeof opt.autoClose === 'boolean' ? - opt.autoClose : true - - if (typeof this[_fd] === 'number') - this[_read]() - else - this[_open]() - } - - get fd () { return this[_fd] } - get path () { return this[_path] } - - write () { - throw new TypeError('this is a readable stream') - } - - end () { - throw new TypeError('this is a readable stream') - } - - [_open] () { - fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) - } - - [_onopen] (er, fd) { - if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_read]() - } - } - - [_makeBuf] () { - return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) - } - - [_read] () { - if (!this[_reading]) { - this[_reading] = true - const buf = this[_makeBuf]() - /* istanbul ignore if */ - if (buf.length === 0) - return process.nextTick(() => this[_onread](null, 0, buf)) - fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) => - this[_onread](er, br, buf)) - } - } - - [_onread] (er, br, buf) { - this[_reading] = false - if (er) - this[_onerror](er) - else if (this[_handleChunk](br, buf)) - this[_read]() - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) - } - } - - [_onerror] (er) { - this[_reading] = true - this[_close]() - this.emit('error', er) - } - - [_handleChunk] (br, buf) { - let ret = false - // no effect if infinite - this[_remain] -= br - if (br > 0) - ret = super.write(br < buf.length ? 
buf.slice(0, br) : buf) - - if (br === 0 || this[_remain] <= 0) { - ret = false - this[_close]() - super.end() - } - - return ret - } - - emit (ev, data) { - switch (ev) { - case 'prefinish': - case 'finish': - break - - case 'drain': - if (typeof this[_fd] === 'number') - this[_read]() - break - - case 'error': - if (this[_errored]) - return - this[_errored] = true - return super.emit(ev, data) - - default: - return super.emit(ev, data) - } - } -} - -class ReadStreamSync extends ReadStream { - [_open] () { - let threw = true - try { - this[_onopen](null, fs.openSync(this[_path], 'r')) - threw = false - } finally { - if (threw) - this[_close]() - } - } - - [_read] () { - let threw = true - try { - if (!this[_reading]) { - this[_reading] = true - do { - const buf = this[_makeBuf]() - /* istanbul ignore next */ - const br = buf.length === 0 ? 0 - : fs.readSync(this[_fd], buf, 0, buf.length, null) - if (!this[_handleChunk](br, buf)) - break - } while (true) - this[_reading] = false - } - threw = false - } finally { - if (threw) - this[_close]() - } - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } - } -} - -class WriteStream extends EE { - constructor (path, opt) { - opt = opt || {} - super(opt) - this.readable = false - this.writable = true - this[_errored] = false - this[_writing] = false - this[_ended] = false - this[_needDrain] = false - this[_queue] = [] - this[_path] = path - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_mode] = opt.mode === undefined ? 0o666 : opt.mode - this[_pos] = typeof opt.start === 'number' ? opt.start : null - this[_autoClose] = typeof opt.autoClose === 'boolean' ? - opt.autoClose : true - - // truncating makes no sense when writing into the middle - const defaultFlag = this[_pos] !== null ? 'r+' : 'w' - this[_defaultFlag] = opt.flags === undefined - this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags - - if (this[_fd] === null) - this[_open]() - } - - emit (ev, data) { - if (ev === 'error') { - if (this[_errored]) - return - this[_errored] = true - } - return super.emit(ev, data) - } - - - get fd () { return this[_fd] } - get path () { return this[_path] } - - [_onerror] (er) { - this[_close]() - this[_writing] = true - this.emit('error', er) - } - - [_open] () { - fs.open(this[_path], this[_flags], this[_mode], - (er, fd) => this[_onopen](er, fd)) - } - - [_onopen] (er, fd) { - if (this[_defaultFlag] && - this[_flags] === 'r+' && - er && er.code === 'ENOENT') { - this[_flags] = 'w' - this[_open]() - } else if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_flush]() - } - } - - end (buf, enc) { - if (buf) - this.write(buf, enc) - - this[_ended] = true - - // synthetic after-write logic, where drain/finish live - if (!this[_writing] && !this[_queue].length && - typeof this[_fd] === 'number') - this[_onwrite](null, 0) - return this - } - - write (buf, enc) { - if (typeof buf === 'string') - buf = Buffer.from(buf, enc) - - if (this[_ended]) { - this.emit('error', new Error('write() after end()')) - return false - } - - if (this[_fd] === null || this[_writing] || this[_queue].length) { - this[_queue].push(buf) - this[_needDrain] = true - return false - } - - this[_writing] = true - this[_write](buf) - return true - } - - [_write] (buf) { - fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => - this[_onwrite](er, bw)) - } - - [_onwrite] (er, bw) { - if (er) - this[_onerror](er) - else { - if (this[_pos] !== null) - this[_pos] += bw - if (this[_queue].length) - this[_flush]() - else { - this[_writing] = false - - if (this[_ended] && !this[_finished]) { - this[_finished] = true - this[_close]() - this.emit('finish') - } else if (this[_needDrain]) { - this[_needDrain] = false - this.emit('drain') - } - } - } - } - - [_flush] () { - if (this[_queue].length === 0) { - if (this[_ended]) - this[_onwrite](null, 0) - } else if (this[_queue].length === 1) - this[_write](this[_queue].pop()) - else { - const iovec = this[_queue] - this[_queue] = [] - writev(this[_fd], iovec, this[_pos], - (er, bw) => this[_onwrite](er, bw)) - } - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) - } - } -} - -class WriteStreamSync extends WriteStream { - [_open] () { - let fd - // only wrap in a try{} block if we know we'll retry, to avoid - // the rethrow obscuring the error's source frame in most cases. 
- if (this[_defaultFlag] && this[_flags] === 'r+') { - try { - fd = fs.openSync(this[_path], this[_flags], this[_mode]) - } catch (er) { - if (er.code === 'ENOENT') { - this[_flags] = 'w' - return this[_open]() - } else - throw er - } - } else - fd = fs.openSync(this[_path], this[_flags], this[_mode]) - - this[_onopen](null, fd) - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } - } - - [_write] (buf) { - // throw the original, but try to close if it fails - let threw = true - try { - this[_onwrite](null, - fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) - threw = false - } finally { - if (threw) - try { this[_close]() } catch (_) {} - } - } -} - -exports.ReadStream = ReadStream -exports.ReadStreamSync = ReadStreamSync - -exports.WriteStream = WriteStream -exports.WriteStreamSync = WriteStreamSync diff --git a/node_modules/node-gyp/node_modules/fs-minipass/package.json b/node_modules/node-gyp/node_modules/fs-minipass/package.json deleted file mode 100644 index 2f2436cb5c3b1..0000000000000 --- a/node_modules/node-gyp/node_modules/fs-minipass/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "fs-minipass", - "version": "2.1.0", - "main": "index.js", - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "keywords": [], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/fs-minipass.git" - }, - "bugs": { - "url": "https://github.com/npm/fs-minipass/issues" - }, - "homepage": "https://github.com/npm/fs-minipass#readme", - "description": "fs read and write streams based on minipass", - "dependencies": { - "minipass": "^3.0.0" - }, - "devDependencies": { - "mutate-fs": "^2.0.1", - "tap": "^14.6.4" - }, - "files": [ - "index.js" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">= 8" - } -} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE deleted file mode 100644 index 1808eb2844231..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. 
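Stepping back, the fs-minipass module deleted above pairs an fs-backed Minipass source (ReadStream) with an EventEmitter-based sink (WriteStream), both auto-closing their file descriptors by default. A minimal usage sketch of that 2.x surface (hypothetical paths):

const { ReadStream, WriteStream } = require('fs-minipass')

// copy a file; 'close' fires once the destination fd has been released
const src = new ReadStream('/etc/hosts')
const dest = new WriteStream('/tmp/hosts-copy.txt')
src.pipe(dest)
dest.on('close', () => console.log('copy finished'))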
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js deleted file mode 100644 index dd68492ed7ea7..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js +++ /dev/null @@ -1,214 +0,0 @@ -'use strict' -const LRU = require('lru-cache') -const url = require('url') -const isLambda = require('is-lambda') -const dns = require('./dns.js') - -const AGENT_CACHE = new LRU({ max: 50 }) -const HttpAgent = require('agentkeepalive') -const HttpsAgent = HttpAgent.HttpsAgent - -module.exports = getAgent - -const getAgentTimeout = timeout => - typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 - -const getMaxSockets = maxSockets => maxSockets || 15 - -function getAgent (uri, opts) { - const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) - const isHttps = parsedUri.protocol === 'https:' - const pxuri = getProxyUri(parsedUri.href, opts) - - // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout - // of zero disables the timeout behavior (OS limits still apply). Else, if - // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that - // the node-fetch-npm timeout will always fire first, giving us more - // consistent errors. - const agentTimeout = getAgentTimeout(opts.timeout) - const agentMaxSockets = getMaxSockets(opts.maxSockets) - - const key = [ - `https:${isHttps}`, - pxuri - ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` - : '>no-proxy<', - `local-address:${opts.localAddress || '>no-local-address<'}`, - `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, - `ca:${(isHttps && opts.ca) || '>no-ca<'}`, - `cert:${(isHttps && opts.cert) || '>no-cert<'}`, - `key:${(isHttps && opts.key) || '>no-key<'}`, - `timeout:${agentTimeout}`, - `maxSockets:${agentMaxSockets}`, - ].join(':') - - if (opts.agent != null) { // `agent: false` has special behavior! - return opts.agent - } - - // keep alive in AWS lambda makes no sense - const lambdaAgent = !isLambda ? null - : isHttps ? require('https').globalAgent - : require('http').globalAgent - - if (isLambda && !pxuri) { - return lambdaAgent - } - - if (AGENT_CACHE.peek(key)) { - return AGENT_CACHE.get(key) - } - - if (pxuri) { - const pxopts = isLambda ? { - ...opts, - agent: lambdaAgent, - } : opts - const proxy = getProxy(pxuri, pxopts, isHttps) - AGENT_CACHE.set(key, proxy) - return proxy - } - - const agent = isHttps ? 
new HttpsAgent({ - maxSockets: agentMaxSockets, - ca: opts.ca, - cert: opts.cert, - key: opts.key, - localAddress: opts.localAddress, - rejectUnauthorized: opts.rejectUnauthorized, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) : new HttpAgent({ - maxSockets: agentMaxSockets, - localAddress: opts.localAddress, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) - AGENT_CACHE.set(key, agent) - return agent -} - -function checkNoProxy (uri, opts) { - const host = new url.URL(uri).hostname.split('.').reverse() - let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) - if (typeof noproxy === 'string') { - noproxy = noproxy.split(',').map(n => n.trim()) - } - - return noproxy && noproxy.some(no => { - const noParts = no.split('.').filter(x => x).reverse() - if (!noParts.length) { - return false - } - for (let i = 0; i < noParts.length; i++) { - if (host[i] !== noParts[i]) { - return false - } - } - return true - }) -} - -module.exports.getProcessEnv = getProcessEnv - -function getProcessEnv (env) { - if (!env) { - return - } - - let value - - if (Array.isArray(env)) { - for (const e of env) { - value = process.env[e] || - process.env[e.toUpperCase()] || - process.env[e.toLowerCase()] - if (typeof value !== 'undefined') { - break - } - } - } - - if (typeof env === 'string') { - value = process.env[env] || - process.env[env.toUpperCase()] || - process.env[env.toLowerCase()] - } - - return value -} - -module.exports.getProxyUri = getProxyUri -function getProxyUri (uri, opts) { - const protocol = new url.URL(uri).protocol - - const proxy = opts.proxy || - ( - protocol === 'https:' && - getProcessEnv('https_proxy') - ) || - ( - protocol === 'http:' && - getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) - ) - if (!proxy) { - return null - } - - const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy - - return !checkNoProxy(uri, opts) && parsedProxy -} - -const getAuth = u => - u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) - : u.username ? 
decodeURIComponent(u.username) - : null - -const getPath = u => u.pathname + u.search + u.hash - -const HttpProxyAgent = require('http-proxy-agent') -const HttpsProxyAgent = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -module.exports.getProxy = getProxy -function getProxy (proxyUrl, opts, isHttps) { - // our current proxy agents do not support an overridden dns lookup method, so will not - // benefit from the dns cache - const popts = { - host: proxyUrl.hostname, - port: proxyUrl.port, - protocol: proxyUrl.protocol, - path: getPath(proxyUrl), - auth: getAuth(proxyUrl), - ca: opts.ca, - cert: opts.cert, - key: opts.key, - timeout: getAgentTimeout(opts.timeout), - localAddress: opts.localAddress, - maxSockets: getMaxSockets(opts.maxSockets), - rejectUnauthorized: opts.rejectUnauthorized, - } - - if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { - if (!isHttps) { - return new HttpProxyAgent(popts) - } else { - return new HttpsProxyAgent(popts) - } - } else if (proxyUrl.protocol.startsWith('socks')) { - // socks-proxy-agent uses hostname not host - popts.hostname = popts.host - delete popts.host - return new SocksProxyAgent(popts) - } else { - throw Object.assign( - new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), - { - code: 'EUNSUPPORTEDPROXY', - url: proxyUrl.href, - } - ) - } -} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index dba89d715d8a5..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,444 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const Minipass = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: options.compress != null ? 
options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. - // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. 
return undefined to allow cacheFetch to - // create a brand new request no matter what. - if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = this.response.headers.get('content-length') - const cacheOpts = { - algorithms: this.options.algorithms, - metadata: getMetadata(this.request, this.response, this.options), - size, - integrity: this.options.integrity, - integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, - } - - let body = null - // we only set a body if the status is a 200, redirects are - // stored as metadata only - if (this.response.status === 200) { - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }) - - body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - // this is always true since if we aren't reusing the one from the remote fetch, we - // are using the one from cacache - body.hasIntegrityEmitter = true - - const onResume = () => { - const tee = new Minipass() - const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) - // re-emit the integrity and size events on our new response body so they can be reused - cacheStream.on('integrity', i => body.emit('integrity', i)) - cacheStream.on('size', s => body.emit('size', s)) - // stick a flag on here so downstream users will know if they can expect integrity events - tee.pipe(cacheStream) - // TODO if the cache write fails, log a warning but return the response anyway - // eslint-disable-next-line promise/catch-or-return - 
cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) - body.unshift(tee) - body.unshift(this.response.body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - } else { - await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) - } - - // note: we do not set the x-local-cache-hash header because we do not know - // the hash value until after the write to the cache completes, which doesn't - // happen until after the response has been sent and it's too late to write - // the header anyway - this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - this.response.headers.set('x-local-cache-mode', 'stream') - this.response.headers.set('x-local-cache-status', status) - this.response.headers.set('x-local-cache-time', new Date().toISOString()) - const newResponse = new Response(body, { - url: this.response.url, - status: this.response.status, - headers: this.response.headers, - counter: this.options.counter, - }) - return newResponse - } - - // use the cached data to create a response and return it - async respond (method, options, status) { - let response - if (method === 'HEAD' || [301, 308].includes(this.response.status)) { - // if the request is a HEAD, or the response is a redirect, - // then the metadata in the entry already includes everything - // we need to build a response - response = this.response - } else { - // we're responding with a full cached response, so create a body - // that reads from cacache and attach it to a new Response - const body = new Minipass() - const headers = { ...this.policy.responseHeaders() } - const onResume = () => { - const cacheStream = cacache.get.stream.byDigest( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - cacheStream.on('error', async (err) => { - cacheStream.pause() - if (err.code === 'EINTEGRITY') { - await cacache.rm.content( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - } - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { - await CacheEntry.invalidate(this.request, this.options) - } - body.emit('error', err) - cacheStream.resume() - }) - // emit the integrity and size events based on our metadata so we're consistent - body.emit('integrity', this.entry.integrity) - body.emit('size', Number(headers['content-length'])) - cacheStream.pipe(body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - response = new Response(body, { - url: this.entry.metadata.url, - counter: options.counter, - status: 200, - headers, - }) - } - - response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) - response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - response.headers.set('x-local-cache-mode', 'stream') - response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) - return response - } - - // use the provided request along with this cache entry to - // revalidate the stored response. 
returns a response, either - // from the cache or from the update - async revalidate (request, options) { - const revalidateRequest = new Request(request, { - headers: this.policy.revalidationHeaders(request), - }) - - try { - // NOTE: be sure to remove the headers property from the - // user supplied options, since we have already defined - // them on the new request object. if they're still in the - // options then those will overwrite the ones from the policy - var response = await remote(revalidateRequest, { - ...options, - headers: undefined, - }) - } catch (err) { - // if the network fetch fails, return the stale - // cached response unless it has a cache-control - // of 'must-revalidate' - if (!this.policy.mustRevalidate) { - return this.respond(request.method, options, 'stale') - } - - throw err - } - - if (this.policy.revalidated(revalidateRequest, response)) { - // we got a 304, write a new index to the cache and respond from cache - const metadata = getMetadata(request, response, options) - // 304 responses do not include headers that are specific to the response data - // since they do not include a body, so we copy values for headers that were - // in the old cache entry to the new one, if the new metadata does not already - // include that header - for (const name of KEEP_RESPONSE_HEADERS) { - if ( - !hasOwnProperty(metadata.resHeaders, name) && - hasOwnProperty(this.entry.metadata.resHeaders, name) - ) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - } - - try { - await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { - size: this.entry.size, - metadata, - }) - } catch (err) { - // if updating the cache index fails, we ignore it and - // respond anyway - } - return this.respond(request.method, options, 'revalidated') - } - - // if we got a modified response, create a new entry based on it - const newEntry = new CacheEntry({ - request, - response, - options, - }) - - // respond with the new entry while writing it to the cache - return newEntry.store('updated') - } -} - -module.exports = CacheEntry diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js deleted file mode 100644 index 67a66573bebe6..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js +++ /dev/null @@ -1,11 +0,0 @@ -class NotCachedError extends Error { - constructor (url) { - /* eslint-disable-next-line max-len */ - super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) - this.code = 'ENOTCACHED' - } -} - -module.exports = { - NotCachedError, -} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js deleted file mode 100644 index 0de49d23fb933..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js +++ /dev/null @@ -1,49 +0,0 @@ -const { NotCachedError } = require('./errors.js') -const CacheEntry = require('./entry.js') -const remote = require('../remote.js') - -// do whatever is necessary to get a Response and return it -const cacheFetch = async (request, options) => { - // try to find a cached entry that satisfies this request - const entry = await CacheEntry.find(request, options) - if (!entry) { - // no cached result, if the cache mode is 'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') { - throw new 
NotCachedError(request.url) - } - - // otherwise, we make a request, store it and return it - const response = await remote(request, options) - const newEntry = new CacheEntry({ request, response, options }) - return newEntry.store('miss') - } - - // we have a cached response that satisfies this request, however if the cache - // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') { - return entry.revalidate(request, options) - } - - // if the cached entry is not stale, or if the cache mode is 'force-cache' or - // 'only-if-cached' we can respond with the cached entry. set the status - // based on the result of needsRevalidation and respond - const _needsRevalidation = entry.policy.needsRevalidation(request) - if (options.cache === 'force-cache' || - options.cache === 'only-if-cached' || - !_needsRevalidation) { - return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit') - } - - // if we got here, the cache entry is stale so revalidate it - return entry.revalidate(request, options) -} - -cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) { - return - } - - return CacheEntry.invalidate(request, options) -} - -module.exports = cacheFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js deleted file mode 100644 index f7684d562b7fa..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js +++ /dev/null @@ -1,17 +0,0 @@ -const { URL, format } = require('url') - -// options passed to url.format() when generating a key -const formatOptions = { - auth: false, - fragment: false, - search: true, - unicode: false, -} - -// returns a string to be used as the cache key for the Request -const cacheKey = (request) => { - const parsed = new URL(request.url) - return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` -} - -module.exports = cacheKey diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js deleted file mode 100644 index ada3c8600dae9..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js +++ /dev/null @@ -1,161 +0,0 @@ -const CacheSemantics = require('http-cache-semantics') -const Negotiator = require('negotiator') -const ssri = require('ssri') - -// options passed to http-cache-semantics constructor -const policyOptions = { - shared: false, - ignoreCargoCult: true, -} - -// a fake empty response, used when only testing the -// request for storability -const emptyResponse = { status: 200, headers: {} } - -// returns a plain object representation of the Request -const requestObject = (request) => { - const _obj = { - method: request.method, - url: request.url, - headers: {}, - compress: request.compress, - } - - request.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -// returns a plain object representation of the Response -const responseObject = (response) => { - const _obj = { - status: response.status, - headers: {}, - } - - response.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -class CachePolicy { - constructor ({ entry, request, response, options }) { - this.entry = entry - this.request = requestObject(request) - this.response = responseObject(response) - this.options = options - this.policy = new 
CacheSemantics(this.request, this.response, policyOptions) - - if (this.entry) { - // if we have an entry, copy the timestamp to the _responseTime - // this is necessary because the CacheSemantics constructor forces - // the value to Date.now() which means a policy created from a - // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.metadata.time - } - } - - // static method to quickly determine if a request alone is storable - static storable (request, options) { - // no cachePath means no caching - if (!options.cachePath) { - return false - } - - // user explicitly asked not to cache - if (options.cache === 'no-store') { - return false - } - - // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) { - return false - } - - // otherwise, let http-cache-semantics make the decision - // based on the request's headers - const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) - return policy.storable() - } - - // returns true if the policy satisfies the request - satisfies (request) { - const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) { - return false - } - - if (this.request.compress !== _req.compress) { - return false - } - - const negotiatorA = new Negotiator(this.request) - const negotiatorB = new Negotiator(_req) - - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { - return false - } - - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { - return false - } - - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { - return false - } - - if (this.options.integrity) { - return ssri.parse(this.options.integrity).match(this.entry.integrity) - } - - return true - } - - // returns true if the request and response allow caching - storable () { - return this.policy.storable() - } - - // NOTE: this is a hack to avoid parsing the cache-control - // header ourselves, it returns true if the response's - // cache-control contains must-revalidate - get mustRevalidate () { - return !!this.policy._rescc['must-revalidate'] - } - - // returns true if the cached response requires revalidation - // for the given request - needsRevalidation (request) { - const _req = requestObject(request) - // force method to GET because we only cache GETs - // but can serve a HEAD from a cached GET - _req.method = 'GET' - return !this.policy.satisfiesWithoutRevalidation(_req) - } - - responseHeaders () { - return this.policy.responseHeaders() - } - - // returns a new object containing the appropriate headers - // to send a revalidation request - revalidationHeaders (request) { - const _req = requestObject(request) - return this.policy.revalidationHeaders(_req) - } - - // returns true if the request/response was revalidated - // successfully. 
returns false if a new response was received - revalidated (request, response) { - const _req = requestObject(request) - const _res = responseObject(response) - const policy = this.policy.revalidatedPolicy(_req, _res) - return !policy.modified - } -} - -module.exports = CachePolicy diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js deleted file mode 100644 index 13102b57c4aa0..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js +++ /dev/null @@ -1,49 +0,0 @@ -const LRUCache = require('lru-cache') -const dns = require('dns') - -const defaultOptions = exports.defaultOptions = { - family: undefined, - hints: dns.ADDRCONFIG, - all: false, - verbatim: undefined, -} - -const lookupCache = exports.lookupCache = new LRUCache({ max: 50 }) - -// this is a factory so that each request can have its own opts (i.e. ttl) -// while still sharing the cache across all requests -exports.getLookup = (dnsOptions) => { - return (hostname, options, callback) => { - if (typeof options === 'function') { - callback = options - options = null - } else if (typeof options === 'number') { - options = { family: options } - } - - options = { ...defaultOptions, ...options } - - const key = JSON.stringify({ - hostname, - family: options.family, - hints: options.hints, - all: options.all, - verbatim: options.verbatim, - }) - - if (lookupCache.has(key)) { - const [address, family] = lookupCache.get(key) - process.nextTick(callback, null, address, family) - return - } - - dnsOptions.lookup(hostname, options, (err, address, family) => { - if (err) { - return callback(err) - } - - lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl }) - return callback(null, address, family) - }) - } -} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js deleted file mode 100644 index 233ba67e16550..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict' - -const { FetchError, Request, isRedirect } = require('minipass-fetch') -const url = require('url') - -const CachePolicy = require('./cache/policy.js') -const cache = require('./cache/index.js') -const remote = require('./remote.js') - -// given a Request, a Response and user options -// return true if the response is a redirect that -// can be followed. 
we throw errors that will result -// in the fetch being rejected if the redirect is -// possible but invalid for some reason -const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) { - return false - } - - if (options.redirect === 'manual') { - return false - } - - if (options.redirect === 'error') { - throw new FetchError(`redirect mode is set to error: ${request.url}`, - 'no-redirect', { code: 'ENOREDIRECT' }) - } - - if (!response.headers.has('location')) { - throw new FetchError(`redirect location header missing for: ${request.url}`, - 'no-location', { code: 'EINVALIDREDIRECT' }) - } - - if (request.counter >= request.follow) { - throw new FetchError(`maximum redirect reached at: ${request.url}`, - 'max-redirect', { code: 'EMAXREDIRECT' }) - } - - return true -} - -// given a Request, a Response, and the user's options return an object -// with a new Request and a new options object that will be used for -// following the redirect -const getRedirect = (request, response, options) => { - const _opts = { ...options } - const location = response.headers.get('location') - const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) - // Comment below is used under the following license: - /** - * @license - * Copyright (c) 2010-2012 Mikeal Rogers - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an "AS - * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language - * governing permissions and limitations under the License. - */ - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) { - request.headers.delete('authorization') - request.headers.delete('cookie') - } - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - response.status === 303 || - (request.method === 'POST' && [301, 302].includes(response.status)) - ) { - _opts.method = 'GET' - _opts.body = null - request.headers.delete('content-length') - } - - _opts.headers = {} - request.headers.forEach((value, key) => { - _opts.headers[key] = value - }) - - _opts.counter = ++request.counter - const redirectReq = new Request(url.format(redirectUrl), _opts) - return { - request: redirectReq, - options: _opts, - } -} - -const fetch = async (request, options) => { - const response = CachePolicy.storable(request, options) - ? 
await cache(request, options) - : await remote(request, options) - - // if the request wasn't a GET or HEAD, and the response - // status is between 200 and 399 inclusive, invalidate the - // request url - if (!['GET', 'HEAD'].includes(request.method) && - response.status >= 200 && - response.status <= 399) { - await cache.invalidate(request, options) - } - - if (!canFollowRedirect(request, response, options)) { - return response - } - - const redirect = getRedirect(request, response, options) - return fetch(redirect.request, redirect.options) -} - -module.exports = fetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js deleted file mode 100644 index 2f12e8e1b6113..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js +++ /dev/null @@ -1,41 +0,0 @@ -const { FetchError, Headers, Request, Response } = require('minipass-fetch') - -const configureOptions = require('./options.js') -const fetch = require('./fetch.js') - -const makeFetchHappen = (url, opts) => { - const options = configureOptions(opts) - - const request = new Request(url, options) - return fetch(request, options) -} - -makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { - if (typeof defaultUrl === 'object') { - defaultOptions = defaultUrl - defaultUrl = null - } - - const defaultedFetch = (url, options = {}) => { - const finalUrl = url || defaultUrl - const finalOptions = { - ...defaultOptions, - ...options, - headers: { - ...defaultOptions.headers, - ...options.headers, - }, - } - return wrappedFetch(finalUrl, finalOptions) - } - - defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => - makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) - return defaultedFetch -} - -module.exports = makeFetchHappen -module.exports.FetchError = FetchError -module.exports.Headers = Headers -module.exports.Request = Request -module.exports.Response = Response diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js deleted file mode 100644 index daa9ecd9d5d5f..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js +++ /dev/null @@ -1,52 +0,0 @@ -const dns = require('dns') - -const conditionalHeaders = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', -] - -const configureOptions = (opts) => { - const { strictSSL, ...options } = { ...opts } - options.method = options.method ? 
options.method.toUpperCase() : 'GET' - options.rejectUnauthorized = strictSSL !== false - - if (!options.retry) { - options.retry = { retries: 0 } - } else if (typeof options.retry === 'string') { - const retries = parseInt(options.retry, 10) - if (isFinite(retries)) { - options.retry = { retries } - } else { - options.retry = { retries: 0 } - } - } else if (typeof options.retry === 'number') { - options.retry = { retries: options.retry } - } else { - options.retry = { retries: 0, ...options.retry } - } - - options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } - - options.cache = options.cache || 'default' - if (options.cache === 'default') { - const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { - return conditionalHeaders.includes(name.toLowerCase()) - }) - if (hasConditionalHeader) { - options.cache = 'no-store' - } - } - - // cacheManager is deprecated, but if it's set and - // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) { - options.cachePath = options.cacheManager - } - - return options -} - -module.exports = configureOptions diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js deleted file mode 100644 index b1d221b2d0ce3..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const MinipassPipeline = require('minipass-pipeline') - -class CachingMinipassPipeline extends MinipassPipeline { - #events = [] - #data = new Map() - - constructor (opts, ...streams) { - // CRITICAL: do NOT pass the streams to the call to super(), this will start - // the flow of data and potentially cause the events we need to catch to emit - // before we've finished our own setup. instead we call super() with no args, - // finish our setup, and then push the streams into ourselves to start the - // data flow - super() - this.#events = opts.events - - /* istanbul ignore next - coverage disabled because this is pointless to test here */ - if (streams.length) { - this.push(...streams) - } - } - - on (event, handler) { - if (this.#events.includes(event) && this.#data.has(event)) { - return handler(...this.#data.get(event)) - } - - return super.on(event, handler) - } - - emit (event, ...data) { - if (this.#events.includes(event)) { - this.#data.set(event, data) - } - - return super.emit(event, ...data) - } -} - -module.exports = CachingMinipassPipeline diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js deleted file mode 100644 index 068c73a8a7a7e..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js +++ /dev/null @@ -1,121 +0,0 @@ -const Minipass = require('minipass') -const fetch = require('minipass-fetch') -const promiseRetry = require('promise-retry') -const ssri = require('ssri') - -const CachingMinipassPipeline = require('./pipeline.js') -const getAgent = require('./agent.js') -const pkg = require('../package.json') - -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) 
- 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// make a request directly to the remote source, -// retrying certain classes of errors as well as -// following redirects (through the cache if necessary) -// and verifying response integrity -const remoteFetch = (request, options) => { - const agent = getAgent(request.url, options) - if (!request.headers.has('connection')) { - request.headers.set('connection', agent ? 'keep-alive' : 'close') - } - - if (!request.headers.has('user-agent')) { - request.headers.set('user-agent', USER_AGENT) - } - - // keep our own options since we're overriding the agent - // and the redirect mode - const _opts = { - ...options, - agent, - redirect: 'manual', - } - - return promiseRetry(async (retryHandler, attemptNum) => { - const req = new fetch.Request(request, _opts) - try { - let res = await fetch(req, _opts) - if (_opts.integrity && res.status === 200) { - // we got a 200 response and the user has specified an expected - // integrity value, so wrap the response in an ssri stream to verify it - const integrityStream = ssri.integrityStream({ - algorithms: _opts.algorithms, - integrity: _opts.integrity, - size: _opts.size, - }) - const pipeline = new CachingMinipassPipeline({ - events: ['integrity', 'size'], - }, res.body, integrityStream) - // we also propagate the integrity and size events out to the pipeline so we can use - // this new response body as an integrityEmitter for cacache - integrityStream.on('integrity', i => pipeline.emit('integrity', i)) - integrityStream.on('size', s => pipeline.emit('size', s)) - res = new fetch.Response(pipeline, res) - // set an explicit flag so we know if our response body will emit integrity and size - res.body.hasIntegrityEmitter = true - } - - res.headers.set('x-fetch-attempts', attemptNum) - - // do not retry POST requests, or requests with a streaming body - // do retry requests with a 408, 420, 429 or 500+ status in the response - const isStream = Minipass.isStream(req.body) - const isRetriable = req.method !== 'POST' && - !isStream && - ([408, 420, 429].includes(res.status) || res.status >= 500) - - if (isRetriable) { - if (typeof options.onRetry === 'function') { - options.onRetry(res) - } - - return retryHandler(res) - } - - return res - } catch (err) { - const code = (err.code === 'EPROMISERETRY') - ? 
err.retried.code - : err.code - - // err.retried will be the thing that was thrown from above - // if it's a response, we just got a bad status code and we - // can re-throw to allow the retry - const isRetryError = err.retried instanceof fetch.Response || - (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - - if (req.method === 'POST' || isRetryError) { - throw err - } - - if (typeof options.onRetry === 'function') { - options.onRetry(err) - } - - return retryHandler(err) - } - }, options.retry).catch((err) => { - // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') { - return err - } - - throw err - }) -} - -module.exports = remoteFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json deleted file mode 100644 index fc491d1152e15..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "name": "make-fetch-happen", - "version": "10.2.1", - "description": "Opinionated, caching, retrying fetch client", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "test": "tap", - "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.js\"", - "lintfix": "npm run lint -- --fix", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" - }, - "keywords": [ - "http", - "request", - "fetch", - "mean girls", - "caching", - "cache", - "subresource integrity" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^16.1.0", - "http-cache-semantics": "^4.1.0", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-fetch": "^2.0.3", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^9.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "mkdirp": "^1.0.4", - "nock": "^13.2.4", - "rimraf": "^3.0.2", - "safe-buffer": "^5.2.1", - "standard-version": "^9.3.2", - "tap": "^16.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "tap": { - "color": 1, - "files": "test/*.js", - "check-coverage": true, - "timeout": 60 - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE b/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE deleted file mode 100644 index 3c3410cdc12ee..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Isaac Z. 
Schlueter and Contributors -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - ---- - -Note: This is a derivative work based on "node-fetch" by David Frank, -modified and distributed under the terms of the MIT license above. -https://github.com/bitinn/node-fetch diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js deleted file mode 100644 index b18f643269e37..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' -class AbortError extends Error { - constructor (message) { - super(message) - this.code = 'FETCH_ABORTED' - this.type = 'aborted' - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'AbortError' - } - - // don't allow name to be overridden, but don't throw either - set name (s) {} -} -module.exports = AbortError diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js deleted file mode 100644 index efe69a34458af..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js +++ /dev/null @@ -1,97 +0,0 @@ -'use strict' -const Minipass = require('minipass') -const TYPE = Symbol('type') -const BUFFER = Symbol('buffer') - -class Blob { - constructor (blobParts, options) { - this[TYPE] = '' - - const buffers = [] - let size = 0 - - if (blobParts) { - const a = blobParts - const length = Number(a.length) - for (let i = 0; i < length; i++) { - const element = a[i] - const buffer = element instanceof Buffer ? element - : ArrayBuffer.isView(element) - ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) - : element instanceof ArrayBuffer ? Buffer.from(element) - : element instanceof Blob ? element[BUFFER] - : typeof element === 'string' ? 
Buffer.from(element) - : Buffer.from(String(element)) - size += buffer.length - buffers.push(buffer) - } - } - - this[BUFFER] = Buffer.concat(buffers, size) - - const type = options && options.type !== undefined - && String(options.type).toLowerCase() - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type - } - } - - get size () { - return this[BUFFER].length - } - - get type () { - return this[TYPE] - } - - text () { - return Promise.resolve(this[BUFFER].toString()) - } - - arrayBuffer () { - const buf = this[BUFFER] - const off = buf.byteOffset - const len = buf.byteLength - const ab = buf.buffer.slice(off, off + len) - return Promise.resolve(ab) - } - - stream () { - return new Minipass().end(this[BUFFER]) - } - - slice (start, end, type) { - const size = this.size - const relativeStart = start === undefined ? 0 - : start < 0 ? Math.max(size + start, 0) - : Math.min(start, size) - const relativeEnd = end === undefined ? size - : end < 0 ? Math.max(size + end, 0) - : Math.min(end, size) - const span = Math.max(relativeEnd - relativeStart, 0) - - const buffer = this[BUFFER] - const slicedBuffer = buffer.slice( - relativeStart, - relativeStart + span - ) - const blob = new Blob([], { type }) - blob[BUFFER] = slicedBuffer - return blob - } - - get [Symbol.toStringTag] () { - return 'Blob' - } - - static get BUFFER () { - return BUFFER - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, -}) - -module.exports = Blob diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js deleted file mode 100644 index 9d1b45de30ce9..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js +++ /dev/null @@ -1,350 +0,0 @@ -'use strict' -const Minipass = require('minipass') -const MinipassSized = require('minipass-sized') - -const Blob = require('./blob.js') -const { BUFFER } = Blob -const FetchError = require('./fetch-error.js') - -// optional dependency on 'encoding' -let convert -try { - convert = require('encoding').convert -} catch (e) { - // defer error until textConverted is called -} - -const INTERNALS = Symbol('Body internals') -const CONSUME_BODY = Symbol('consumeBody') - -class Body { - constructor (bodyArg, options = {}) { - const { size = 0, timeout = 0 } = options - const body = bodyArg === undefined || bodyArg === null ? null - : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) - : isBlob(bodyArg) ? bodyArg - : Buffer.isBuffer(bodyArg) ? bodyArg - : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' - ? Buffer.from(bodyArg) - : ArrayBuffer.isView(bodyArg) - ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) - : Minipass.isStream(bodyArg) ? bodyArg - : Buffer.from(String(bodyArg)) - - this[INTERNALS] = { - body, - disturbed: false, - error: null, - } - - this.size = size - this.timeout = timeout - - if (Minipass.isStream(body)) { - body.on('error', er => { - const error = er.name === 'AbortError' ? 
er - : new FetchError(`Invalid response while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - this[INTERNALS].error = error - }) - } - } - - get body () { - return this[INTERNALS].body - } - - get bodyUsed () { - return this[INTERNALS].disturbed - } - - arrayBuffer () { - return this[CONSUME_BODY]().then(buf => - buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) - } - - blob () { - const ct = this.headers && this.headers.get('content-type') || '' - return this[CONSUME_BODY]().then(buf => Object.assign( - new Blob([], { type: ct.toLowerCase() }), - { [BUFFER]: buf } - )) - } - - async json () { - const buf = await this[CONSUME_BODY]() - try { - return JSON.parse(buf.toString()) - } catch (er) { - throw new FetchError( - `invalid json response body at ${this.url} reason: ${er.message}`, - 'invalid-json' - ) - } - } - - text () { - return this[CONSUME_BODY]().then(buf => buf.toString()) - } - - buffer () { - return this[CONSUME_BODY]() - } - - textConverted () { - return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) - } - - [CONSUME_BODY] () { - if (this[INTERNALS].disturbed) { - return Promise.reject(new TypeError(`body used already for: ${ - this.url}`)) - } - - this[INTERNALS].disturbed = true - - if (this[INTERNALS].error) { - return Promise.reject(this[INTERNALS].error) - } - - // body is null - if (this.body === null) { - return Promise.resolve(Buffer.alloc(0)) - } - - if (Buffer.isBuffer(this.body)) { - return Promise.resolve(this.body) - } - - const upstream = isBlob(this.body) ? this.body.stream() : this.body - - /* istanbul ignore if: should never happen */ - if (!Minipass.isStream(upstream)) { - return Promise.resolve(Buffer.alloc(0)) - } - - const stream = this.size && upstream instanceof MinipassSized ? upstream - : !this.size && upstream instanceof Minipass && - !(upstream instanceof MinipassSized) ? upstream - : this.size ? new MinipassSized({ size: this.size }) - : new Minipass() - - // allow timeout on slow response body, but only if the stream is still writable. this - // makes the timeout center on the socket stream from lib/index.js rather than the - // intermediary minipass stream we create to receive the data - const resTimeout = this.timeout && stream.writable ? setTimeout(() => { - stream.emit('error', new FetchError( - `Response timeout while trying to fetch ${ - this.url} (over ${this.timeout}ms)`, 'body-timeout')) - }, this.timeout) : null - - // do not keep the process open just for this timeout, even - // though we expect it'll get cleared eventually. - if (resTimeout && resTimeout.unref) { - resTimeout.unref() - } - - // do the pipe in the promise, because the pipe() can send too much - // data through right away and upset the MP Sized object - return new Promise((resolve, reject) => { - // if the stream is some other kind of stream, then pipe through a MP - // so we can collect it more easily. 
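// [editorial aside, illustrative only, not part of this patch] the pipe
// below feeds a Minipass whose concat() later gathers the whole body into
// a single Buffer. A minimal standalone sketch of that collection step,
// assuming the same CJS `require('minipass')` used throughout this file:
//
//   const Minipass = require('minipass')
//   const collector = new Minipass()
//   collector.concat().then(buf => console.log(buf.toString())) // 'hello world'
//   collector.write(Buffer.from('hello '))
//   collector.end(Buffer.from('world'))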
- if (stream !== upstream) { - upstream.on('error', er => stream.emit('error', er)) - upstream.pipe(stream) - } - resolve() - }).then(() => stream.concat()).then(buf => { - clearTimeout(resTimeout) - return buf - }).catch(er => { - clearTimeout(resTimeout) - // request was aborted, reject with this Error - if (er.name === 'AbortError' || er.name === 'FetchError') { - throw er - } else if (er.name === 'RangeError') { - throw new FetchError(`Could not create Buffer from response body for ${ - this.url}: ${er.message}`, 'system', er) - } else { - // other errors, such as incorrect content-encoding or content-length - throw new FetchError(`Invalid response body while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - } - }) - } - - static clone (instance) { - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used') - } - - const body = instance.body - - // check that body is a stream and not form-data object - // NB: can't clone the form-data object without having it as a dependency - if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { - // create a dedicated tee stream so that we don't lose data - // potentially sitting in the body stream's buffer by writing it - // immediately to p1 and not having it for p2. - const tee = new Minipass() - const p1 = new Minipass() - const p2 = new Minipass() - tee.on('error', er => { - p1.emit('error', er) - p2.emit('error', er) - }) - body.on('error', er => tee.emit('error', er)) - tee.pipe(p1) - tee.pipe(p2) - body.pipe(tee) - // set instance body to one fork, return the other - instance[INTERNALS].body = p1 - return p2 - } else { - return instance.body - } - } - - static extractContentType (body) { - return body === null || body === undefined ? null - : typeof body === 'string' ? 'text/plain;charset=UTF-8' - : isURLSearchParams(body) - ? 'application/x-www-form-urlencoded;charset=UTF-8' - : isBlob(body) ? body.type || null - : Buffer.isBuffer(body) ? null - : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null - : ArrayBuffer.isView(body) ? null - : typeof body.getBoundary === 'function' - ? `multipart/form-data;boundary=${body.getBoundary()}` - : Minipass.isStream(body) ? null - : 'text/plain;charset=UTF-8' - } - - static getTotalBytes (instance) { - const { body } = instance - return (body === null || body === undefined) ? 0 - : isBlob(body) ? body.size - : Buffer.isBuffer(body) ? body.length - : body && typeof body.getLengthSync === 'function' && ( - // detect form data input from form-data module - body._lengthRetrievers && - /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) // 2.x - ? body.getLengthSync() - : null - } - - static writeToStream (dest, instance) { - const { body } = instance - - if (body === null || body === undefined) { - dest.end() - } else if (Buffer.isBuffer(body) || typeof body === 'string') { - dest.end(body) - } else { - // body is stream or blob - const stream = isBlob(body) ? body.stream() : body - stream.on('error', er => dest.emit('error', er)).pipe(dest) - } - - return dest - } -} - -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true }, -}) - -const isURLSearchParams = obj => - // Duck-typing as a necessary condition. 
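// [editorial aside, illustrative only, not part of this patch] the guard
// below treats anything exposing the standard URLSearchParams methods as a
// match; a quick standalone check against Node's built-in class shows the
// surface being probed:
//
//   const params = new URLSearchParams('a=1&b=2')
//   const quacks = ['append', 'delete', 'get', 'getAll', 'has', 'set']
//     .every(m => typeof params[m] === 'function')
//   console.log(quacks) // true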
- (typeof obj !== 'object' || - typeof obj.append !== 'function' || - typeof obj.delete !== 'function' || - typeof obj.get !== 'function' || - typeof obj.getAll !== 'function' || - typeof obj.has !== 'function' || - typeof obj.set !== 'function') ? false - // Brand-checking and more duck-typing as optional condition. - : obj.constructor.name === 'URLSearchParams' || - Object.prototype.toString.call(obj) === '[object URLSearchParams]' || - typeof obj.sort === 'function' - -const isBlob = obj => - typeof obj === 'object' && - typeof obj.arrayBuffer === 'function' && - typeof obj.type === 'string' && - typeof obj.stream === 'function' && - typeof obj.constructor === 'function' && - typeof obj.constructor.name === 'string' && - /^(Blob|File)$/.test(obj.constructor.name) && - /^(Blob|File)$/.test(obj[Symbol.toStringTag]) - -const convertBody = (buffer, headers) => { - /* istanbul ignore if */ - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function') - } - - const ct = headers && headers.get('content-type') - let charset = 'utf-8' - let res - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct) - } - - // no charset in content type, peek at response body for at most 1024 bytes - const str = buffer.slice(0, 1024).toString() - - // html5 - if (!res && str) { - res = / this.expect - ? 'max-size' : type - this.message = message - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'FetchError' - } - - // don't allow name to be overwritten - set name (n) {} - - get [Symbol.toStringTag] () { - return 'FetchError' - } -} -module.exports = FetchError diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js deleted file mode 100644 index dd6e854d5ba39..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js +++ /dev/null @@ -1,267 +0,0 @@ -'use strict' -const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - -const validateName = name => { - name = `${name}` - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`) - } -} - -const validateValue = value => { - value = `${value}` - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`) - } -} - -const find = (map, name) => { - name = name.toLowerCase() - for (const key in map) { - if (key.toLowerCase() === name) { - return key - } - } - return undefined -} - -const MAP = Symbol('map') -class Headers { - constructor (init = undefined) { - this[MAP] = Object.create(null) - if (init instanceof Headers) { - const rawHeaders = init.raw() - const headerNames = Object.keys(rawHeaders) - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value) - } - } - return - } - - // no-op - if (init === undefined || init === null) { - return - } - - if (typeof init === 'object') { - const method = init[Symbol.iterator] - if (method !== null && method !== undefined) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable') - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = [] - for (const pair of init) { - if (typeof pair !== 'object' || - typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header 
pair must be iterable') - } - const arrPair = Array.from(pair) - if (arrPair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple') - } - pairs.push(arrPair) - } - - for (const pair of pairs) { - this.append(pair[0], pair[1]) - } - } else { - // record - for (const key of Object.keys(init)) { - this.append(key, init[key]) - } - } - } else { - throw new TypeError('Provided initializer must be an object') - } - } - - get (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key === undefined) { - return null - } - - return this[MAP][key].join(', ') - } - - forEach (callback, thisArg = undefined) { - let pairs = getHeaders(this) - for (let i = 0; i < pairs.length; i++) { - const [name, value] = pairs[i] - callback.call(thisArg, value, name, this) - // refresh in case the callback added more headers - pairs = getHeaders(this) - } - } - - set (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - this[MAP][key !== undefined ? key : name] = [value] - } - - append (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - if (key !== undefined) { - this[MAP][key].push(value) - } else { - this[MAP][name] = [value] - } - } - - has (name) { - name = `${name}` - validateName(name) - return find(this[MAP], name) !== undefined - } - - delete (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key !== undefined) { - delete this[MAP][key] - } - } - - raw () { - return this[MAP] - } - - keys () { - return new HeadersIterator(this, 'key') - } - - values () { - return new HeadersIterator(this, 'value') - } - - [Symbol.iterator] () { - return new HeadersIterator(this, 'key+value') - } - - entries () { - return new HeadersIterator(this, 'key+value') - } - - get [Symbol.toStringTag] () { - return 'Headers' - } - - static exportNodeCompatibleHeaders (headers) { - const obj = Object.assign(Object.create(null), headers[MAP]) - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host') - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0] - } - - return obj - } - - static createHeadersLenient (obj) { - const headers = new Headers() - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue - } - - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue - } - - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val] - } else { - headers[MAP][name].push(val) - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]] - } - } - return headers - } -} - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true }, -}) - -const getHeaders = (headers, kind = 'key+value') => - Object.keys(headers[MAP]).sort().map( - kind === 'key' ? k => k.toLowerCase() - : kind === 'value' ? 
k => headers[MAP][k].join(', ') - : k => [k.toLowerCase(), headers[MAP][k].join(', ')] - ) - -const INTERNAL = Symbol('internal') - -class HeadersIterator { - constructor (target, kind) { - this[INTERNAL] = { - target, - kind, - index: 0, - } - } - - get [Symbol.toStringTag] () { - return 'HeadersIterator' - } - - next () { - /* istanbul ignore if: should be impossible */ - if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { - throw new TypeError('Value of `this` is not a HeadersIterator') - } - - const { target, kind, index } = this[INTERNAL] - const values = getHeaders(target, kind) - const len = values.length - if (index >= len) { - return { - value: undefined, - done: true, - } - } - - this[INTERNAL].index++ - - return { value: values[index], done: false } - } -} - -// manually extend because 'extends' requires a ctor -Object.setPrototypeOf(HeadersIterator.prototype, - Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) - -module.exports = Headers diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js deleted file mode 100644 index b1878ac0c06b5..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js +++ /dev/null @@ -1,365 +0,0 @@ -'use strict' -const { URL } = require('url') -const http = require('http') -const https = require('https') -const zlib = require('minizlib') -const Minipass = require('minipass') - -const Body = require('./body.js') -const { writeToStream, getTotalBytes } = Body -const Response = require('./response.js') -const Headers = require('./headers.js') -const { createHeadersLenient } = Headers -const Request = require('./request.js') -const { getNodeRequestOptions } = Request -const FetchError = require('./fetch-error.js') -const AbortError = require('./abort-error.js') - -// XXX this should really be split up and unit-ized for easier testing -// and better DRY implementation of data/http request aborting -const fetch = async (url, opts) => { - if (/^data:/.test(url)) { - const request = new Request(url, opts) - // delay 1 promise tick so that the consumer can abort right away - return Promise.resolve().then(() => new Promise((resolve, reject) => { - let type, data - try { - const { pathname, search } = new URL(url) - const split = pathname.split(',') - if (split.length < 2) { - throw new Error('invalid data: URI') - } - const mime = split.shift() - const base64 = /;base64$/.test(mime) - type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime - const rawData = decodeURIComponent(split.join(',') + search) - data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) - } catch (er) { - return reject(new FetchError(`[${request.method}] ${ - request.url} invalid URL, ${er.message}`, 'system', er)) - } - - const { signal } = request - if (signal && signal.aborted) { - return reject(new AbortError('The user aborted a request.')) - } - - const headers = { 'Content-Length': data.length } - if (type) { - headers['Content-Type'] = type - } - return resolve(new Response(data, { headers })) - })) - } - - return new Promise((resolve, reject) => { - // build request object - const request = new Request(url, opts) - let options - try { - options = getNodeRequestOptions(request) - } catch (er) { - return reject(er) - } - - const send = (options.protocol === 'https:' ? 
https : http).request - const { signal } = request - let response = null - const abort = () => { - const error = new AbortError('The user aborted a request.') - reject(error) - if (Minipass.isStream(request.body) && - typeof request.body.destroy === 'function') { - request.body.destroy(error) - } - if (response && response.body) { - response.body.emit('error', error) - } - } - - if (signal && signal.aborted) { - return abort() - } - - const abortAndFinalize = () => { - abort() - finalize() - } - - const finalize = () => { - req.abort() - if (signal) { - signal.removeEventListener('abort', abortAndFinalize) - } - clearTimeout(reqTimeout) - } - - // send request - const req = send(options) - - if (signal) { - signal.addEventListener('abort', abortAndFinalize) - } - - let reqTimeout = null - if (request.timeout) { - req.once('socket', socket => { - reqTimeout = setTimeout(() => { - reject(new FetchError(`network timeout at: ${ - request.url}`, 'request-timeout')) - finalize() - }, request.timeout) - }) - } - - req.on('error', er => { - // if a 'response' event is emitted before the 'error' event, then by the - // time this handler is run it's too late to reject the Promise for the - // response. instead, we forward the error event to the response stream - // so that the error will surface to the user when they try to consume - // the body. this is done as a side effect of aborting the request except - // for in windows, where we must forward the event manually, otherwise - // there is no longer a ref'd socket attached to the request and the - // stream never ends so the event loop runs out of work and the process - // exits without warning. - // coverage skipped here due to the difficulty in testing - // istanbul ignore next - if (req.res) { - req.res.emit('error', er) - } - reject(new FetchError(`request to ${request.url} failed, reason: ${ - er.message}`, 'system', er)) - finalize() - }) - - req.on('response', res => { - clearTimeout(reqTimeout) - - const headers = createHeadersLenient(res.headers) - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location') - - // HTTP fetch step 5.3 - const locationURL = location === null ? null - : (new URL(location, request.url)).toString() - - // HTTP fetch step 5.5 - if (request.redirect === 'error') { - reject(new FetchError('uri requested responds with a redirect, ' + - `redirect mode is set to error: ${request.url}`, 'no-redirect')) - finalize() - return - } else if (request.redirect === 'manual') { - // node-fetch-specific step: make manual redirect a bit easier to - // use by setting the Location header value to the resolved URL. 
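// [editorial aside, illustrative only, not part of this patch] resolving a
// possibly-relative Location header against the request URL is plain WHATWG
// URL behavior; the example values here are invented:
//
//   const { URL } = require('url')
//   const resolved = new URL('/next', 'https://example.com/a/b').toString()
//   console.log(resolved) // 'https://example.com/next'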
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL) - } catch (err) { - /* istanbul ignore next: nodejs server prevent invalid - response headers, we can't test this through normal - request */ - reject(err) - } - } - } else if (request.redirect === 'follow' && locationURL !== null) { - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${ - request.url}`, 'max-redirect')) - finalize() - return - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && - request.body && - getTotalBytes(request) === null) { - reject(new FetchError( - 'Cannot follow redirect with body being a readable stream', - 'unsupported-redirect' - )) - finalize() - return - } - - // Update host due to redirection - request.headers.set('host', (new URL(locationURL)).host) - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - } - - // if the redirect is to a new hostname, strip the authorization and cookie headers - const parsedOriginal = new URL(request.url) - const parsedRedirect = new URL(locationURL) - if (parsedOriginal.hostname !== parsedRedirect.hostname) { - requestOpts.headers.delete('authorization') - requestOpts.headers.delete('cookie') - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || ( - (res.statusCode === 301 || res.statusCode === 302) && - request.method === 'POST' - )) { - requestOpts.method = 'GET' - requestOpts.body = undefined - requestOpts.headers.delete('content-length') - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))) - finalize() - return - } - } // end if(isRedirect) - - // prepare response - res.once('end', () => - signal && signal.removeEventListener('abort', abortAndFinalize)) - - const body = new Minipass() - // if an error occurs, either on the response stream itself, on one of the - // decoder streams, or a response length timeout from the Body class, we - // forward the error through to our internal body stream. If we see an - // error event on that, we call finalize to abort the request and ensure - // we don't leave a socket believing a request is in flight. - // this is difficult to test, so lacks specific coverage. - body.on('error', finalize) - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) - res.on('data', (chunk) => body.write(chunk)) - res.on('end', () => body.end()) - - const responseOptions = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter, - trailer: new Promise(resolveTrailer => - res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), - } - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding') - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. 
no content response (204) - // 5. content not modified response (304) - if (!request.compress || - request.method === 'HEAD' || - codings === null || - res.statusCode === 204 || - res.statusCode === 304) { - response = new Response(body, responseOptions) - resolve(response) - return - } - - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.constants.Z_SYNC_FLUSH, - finishFlush: zlib.constants.Z_SYNC_FLUSH, - } - - // for gzip - if (codings === 'gzip' || codings === 'x-gzip') { - const unzip = new zlib.Gunzip(zlibOptions) - response = new Response( - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), - responseOptions - ) - resolve(response) - return - } - - // for deflate - if (codings === 'deflate' || codings === 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new Minipass()) - raw.once('data', chunk => { - // see http://stackoverflow.com/questions/37519828 - const decoder = (chunk[0] & 0x0F) === 0x08 - ? new zlib.Inflate() - : new zlib.InflateRaw() - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - }) - return - } - - // for br - if (codings === 'br') { - // ignoring coverage so tests don't have to fake support (or lack of) for brotli - // istanbul ignore next - try { - var decoder = new zlib.BrotliDecompress() - } catch (err) { - reject(err) - finalize() - return - } - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. 
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - return - } - - // otherwise, use response as-is - response = new Response(body, responseOptions) - resolve(response) - }) - - writeToStream(req, request) - }) -} - -module.exports = fetch - -fetch.isRedirect = code => - code === 301 || - code === 302 || - code === 303 || - code === 307 || - code === 308 - -fetch.Headers = Headers -fetch.Request = Request -fetch.Response = Response -fetch.FetchError = FetchError -fetch.AbortError = AbortError diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js deleted file mode 100644 index e620df6de2c01..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js +++ /dev/null @@ -1,281 +0,0 @@ -'use strict' -const { URL } = require('url') -const Minipass = require('minipass') -const Headers = require('./headers.js') -const { exportNodeCompatibleHeaders } = Headers -const Body = require('./body.js') -const { clone, extractContentType, getTotalBytes } = Body - -const version = require('../package.json').version -const defaultUserAgent = - `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` - -const INTERNALS = Symbol('Request internals') - -const isRequest = input => - typeof input === 'object' && typeof input[INTERNALS] === 'object' - -const isAbortSignal = signal => { - const proto = ( - signal - && typeof signal === 'object' - && Object.getPrototypeOf(signal) - ) - return !!(proto && proto.constructor.name === 'AbortSignal') -} - -class Request extends Body { - constructor (input, init = {}) { - const parsedURL = isRequest(input) ? new URL(input.url) - : input && input.href ? new URL(input.href) - : new URL(`${input}`) - - if (isRequest(input)) { - init = { ...input[INTERNALS], ...init } - } else if (!input || typeof input === 'string') { - input = {} - } - - const method = (init.method || input.method || 'GET').toUpperCase() - const isGETHEAD = method === 'GET' || method === 'HEAD' - - if ((init.body !== null && init.body !== undefined || - isRequest(input) && input.body !== null) && isGETHEAD) { - throw new TypeError('Request with GET/HEAD method cannot have body') - } - - const inputBody = init.body !== null && init.body !== undefined ? init.body - : isRequest(input) && input.body !== null ? clone(input) - : null - - super(inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0, - }) - - const headers = new Headers(init.headers || input.headers || {}) - - if (inputBody !== null && inputBody !== undefined && - !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - const signal = 'signal' in init ? 
init.signal - : null - - if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { - throw new TypeError('Expected signal must be an instanceof AbortSignal') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = init - - this[INTERNALS] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow - : input.follow !== undefined ? input.follow - : 20 - this.compress = init.compress !== undefined ? init.compress - : input.compress !== undefined ? input.compress - : true - this.counter = init.counter || input.counter || 0 - this.agent = init.agent || input.agent - } - - get method () { - return this[INTERNALS].method - } - - get url () { - return this[INTERNALS].parsedURL.toString() - } - - get headers () { - return this[INTERNALS].headers - } - - get redirect () { - return this[INTERNALS].redirect - } - - get signal () { - return this[INTERNALS].signal - } - - clone () { - return new Request(this) - } - - get [Symbol.toStringTag] () { - return 'Request' - } - - static getNodeRequestOptions (request) { - const parsedURL = request[INTERNALS].parsedURL - const headers = new Headers(request[INTERNALS].headers) - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*') - } - - // Basic fetch - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported') - } - - if (request.signal && - Minipass.isStream(request.body) && - typeof request.body.destroy !== 'function') { - throw new Error( - 'Cancellation of streamed requests with AbortSignal is not supported') - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - const contentLengthValue = - (request.body === null || request.body === undefined) && - /^(POST|PUT)$/i.test(request.method) ? '0' - : request.body !== null && request.body !== undefined - ? getTotalBytes(request) - : null - - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue + '') - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', defaultUserAgent) - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate') - } - - const agent = typeof request.agent === 'function' - ? 
request.agent(parsedURL) - : request.agent - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = request[INTERNALS] - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - // we cannot spread parsedURL directly, so we have to read each property one-by-one - // and map them to the equivalent https?.request() method options - const urlProps = { - auth: parsedURL.username || parsedURL.password - ? `${parsedURL.username}:${parsedURL.password}` - : '', - host: parsedURL.host, - hostname: parsedURL.hostname, - path: `${parsedURL.pathname}${parsedURL.search}`, - port: parsedURL.port, - protocol: parsedURL.protocol, - } - - return { - ...urlProps, - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } - } -} - -module.exports = Request - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true }, -}) diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js deleted file mode 100644 index 54cb52db3594a..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' -const http = require('http') -const { STATUS_CODES } = http - -const Headers = require('./headers.js') -const Body = require('./body.js') -const { clone, extractContentType } = Body - -const INTERNALS = Symbol('Response internals') - -class Response extends Body { - constructor (body = null, opts = {}) { - super(body, opts) - - const status = opts.status || 200 - const headers = new Headers(opts.headers) - - if (body !== null && body !== undefined && !headers.has('Content-Type')) { - const contentType = extractContentType(body) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - this[INTERNALS] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter, - trailer: Promise.resolve(opts.trailer || new Headers()), - } - } - - get trailer () { - return this[INTERNALS].trailer - } - - get url () { - return this[INTERNALS].url || '' - } - - get status () { - return this[INTERNALS].status - } - - get ok () { - return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 - } - - get redirected () { - return this[INTERNALS].counter > 0 - } - - get statusText () { - return this[INTERNALS].statusText - } - - get headers () { - return this[INTERNALS].headers - } - - clone () { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected, - trailer: this.trailer, - }) - } - - get [Symbol.toStringTag] () { - return 'Response' - } -} - -module.exports = Response - -Object.defineProperties(Response.prototype, { - 
url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true }, -}) diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/package.json b/node_modules/node-gyp/node_modules/minipass-fetch/package.json deleted file mode 100644 index ada5aed6d7cf6..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass-fetch/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "minipass-fetch", - "version": "2.1.2", - "description": "An implementation of window.fetch in Node.js using Minipass streams", - "license": "MIT", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "prepublishOnly": "git push origin --follow-tags", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "tap": { - "coverage-map": "map.js", - "check-coverage": true - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "@ungap/url-search-params": "^0.2.2", - "abort-controller": "^3.0.0", - "abortcontroller-polyfill": "~1.7.3", - "encoding": "^0.1.13", - "form-data": "^4.0.0", - "nock": "^13.2.4", - "parted": "^0.1.1", - "string-to-arraybuffer": "^1.0.2", - "tap": "^16.0.0" - }, - "dependencies": { - "minipass": "^3.1.6", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/minipass-fetch.git" - }, - "keywords": [ - "fetch", - "minipass", - "node-fetch", - "window.fetch" - ], - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/node-gyp/node_modules/minipass/LICENSE b/node_modules/node-gyp/node_modules/minipass/LICENSE deleted file mode 100644 index bf1dece2e1f12..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
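The hunks above delete node-gyp's privately vendored minipass-fetch (lib/index.js, lib/request.js, lib/response.js, and its package.json); the copy at the root of node_modules is presumably shared from here on, so no functionality is lost. For orientation, a minimal consumer-side sketch of the window.fetch-style API the deleted files implemented — the registry URL is illustrative only, and res.json() assumes the standard minipass-fetch Body API rather than anything visible in this patch:

    'use strict'
    const fetch = require('minipass-fetch')

    // The deleted Request class defaulted to redirect: 'follow' with a cap
    // of 20 hops (request.follow) and timeout: 0 (disabled) unless set here.
    fetch('https://registry.npmjs.org/npm/latest', { timeout: 30000 })
      .then(res => {
        // Response.ok (deleted above) is true only for 2xx status codes
        if (!res.ok) {
          throw new Error(`unexpected status: ${res.status}`)
        }
        return res.json() // the body is a Minipass stream under the hood
      })
      .then(manifest => console.log(manifest.name, manifest.version))
      .catch(err => console.error('fetch failed:', err.message))

A timeout rejects with a FetchError of type 'request-timeout', and aborting via an AbortSignal rejects with an AbortError, matching the exports at the bottom of the deleted lib/index.js.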
diff --git a/node_modules/node-gyp/node_modules/minipass/index.js b/node_modules/node-gyp/node_modules/minipass/index.js deleted file mode 100644 index e8797aab6cc27..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass/index.js +++ /dev/null @@ -1,649 +0,0 @@ -'use strict' -const proc = typeof process === 'object' && process ? process : { - stdout: null, - stderr: null, -} -const EE = require('events') -const Stream = require('stream') -const SD = require('string_decoder').StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' - -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor (src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe () { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors () {} - end () { - this.unpipe() - if (this.opts.end) - this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe () { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor (src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = [] - this.buffer = [] - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[ASYNC] = options && !!options.async || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - } - - get bufferLength () { return this[BUFFERLENGTH] } - - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') - - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding (enc) { - this.encoding = enc - } - - get objectMode () { return this[OBJECTMODE] } - set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } - - get ['async'] () { return this[ASYNC] } - set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } - - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } - - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - - if (!encoding) - encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - read (n) { - if (this[DESTROYED]) - return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) - n = null - - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = [this.buffer.join('')] - else - this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this.buffer[0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer[0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this.buffer.length && !this[EOF]) - this.emit('drain') - - return chunk - } - - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } - - resume () { - return this[RESUME]() - } - - pause () { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed () { - return this[DESTROYED] - } - - get flowing () { - return this[FLOWING] - } - - get paused () { - return this[PAUSED] - } - - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - this.buffer.push(chunk) - } - - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer[0].length - } - return this.buffer.shift() - } - - [FLUSH] (noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) - - if (!noDrain && !this.buffer.length && !this[EOF]) - this.emit('drain') - } - - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false - } - - pipe (dest, opts) { - if (this[DESTROYED]) - return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false - else - opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end() - } else { - this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)) - if (this[ASYNC]) - defer(() => this[RESUME]()) - else - this[RESUME]() - } - - return dest - } - - unpipe (dest) { - const p = this.pipes.find(p => p.dest === dest) - if (p) { - this.pipes.splice(this.pipes.indexOf(p), 1) - p.unpipe() - } - } - - addListener (ev, fn) { - return this.on(ev, fn) - } - - on (ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) - defer(() => fn.call(this, this[EMITTED_ERROR])) - else - fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd () { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false - } - } - - emit (ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !data ? false - : this[ASYNC] ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - const ret = super.emit('error', data) - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA] (data) { - for (const p of this.pipes) { - if (p.dest.write(data) === false) - this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND] () { - if (this[EMITTED_END]) - return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) - defer(() => this[EMITEND2]()) - else - this[EMITEND2]() - } - - [EMITEND2] () { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this.pipes) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this.pipes) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? 
buf.join('') : Buffer.concat(buf, buf.dataLength)) - } - - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) - return Promise.resolve({ done: true }) - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { next } - } - - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } - } - return { next } - } - - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer.length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() - - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) - - return this - } - - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) - } -} diff --git a/node_modules/node-gyp/node_modules/minipass/package.json b/node_modules/node-gyp/node_modules/minipass/package.json deleted file mode 100644 index 548d03fa6d5d4..0000000000000 --- a/node_modules/node-gyp/node_modules/minipass/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "minipass", - "version": "3.3.6", - "description": "minimal implementation of a PassThrough stream", - "main": "index.js", - "types": "index.d.ts", - "dependencies": { - "yallist": "^4.0.0" - }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typescript": "^4.7.3" - }, - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" - }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" - }, - "prettier": { - "semi": false, - "printWidth": 80, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - } -} diff --git a/node_modules/node-gyp/node_modules/ssri/LICENSE.md b/node_modules/node-gyp/node_modules/ssri/LICENSE.md deleted file mode 100644 index e335388869f50..0000000000000 --- a/node_modules/node-gyp/node_modules/ssri/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2021 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/ssri/lib/index.js b/node_modules/node-gyp/node_modules/ssri/lib/index.js deleted file mode 100644 index 1443137cbc708..0000000000000 --- a/node_modules/node-gyp/node_modules/ssri/lib/index.js +++ /dev/null @@ -1,524 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const MiniPass = require('minipass') - -const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512'] - -// TODO: this should really be a hardcoded list of algorithms we support, -// rather than [a-z0-9]. -const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i -const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ -const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ -const VCHAR_REGEX = /^[\x21-\x7E]+$/ - -const defaultOpts = { - algorithms: ['sha512'], - error: false, - options: [], - pickAlgorithm: getPrioritizedHash, - sep: ' ', - single: false, - strict: false, -} - -const ssriOpts = (opts = {}) => ({ ...defaultOpts, ...opts }) - -const getOptString = options => !options || !options.length - ? '' - : `?${options.join('?')}` - -const _onEnd = Symbol('_onEnd') -const _getOptions = Symbol('_getOptions') -const _emittedSize = Symbol('_emittedSize') -const _emittedIntegrity = Symbol('_emittedIntegrity') -const _emittedVerified = Symbol('_emittedVerified') - -class IntegrityStream extends MiniPass { - constructor (opts) { - super() - this.size = 0 - this.opts = opts - - // may be overridden later, but set now for class consistency - this[_getOptions]() - - // options used for calculating stream. can't be changed. - const { algorithms = defaultOpts.algorithms } = opts - this.algorithms = Array.from( - new Set(algorithms.concat(this.algorithm ? [this.algorithm] : [])) - ) - this.hashes = this.algorithms.map(crypto.createHash) - } - - [_getOptions] () { - const { - integrity, - size, - options, - } = { ...defaultOpts, ...this.opts } - - // For verification - this.sri = integrity ? parse(integrity, this.opts) : null - this.expectedSize = size - this.goodSri = this.sri ? !!Object.keys(this.sri).length : false - this.algorithm = this.goodSri ? 
this.sri.pickAlgorithm(this.opts) : null - this.digests = this.goodSri ? this.sri[this.algorithm] : null - this.optString = getOptString(options) - } - - on (ev, handler) { - if (ev === 'size' && this[_emittedSize]) { - return handler(this[_emittedSize]) - } - - if (ev === 'integrity' && this[_emittedIntegrity]) { - return handler(this[_emittedIntegrity]) - } - - if (ev === 'verified' && this[_emittedVerified]) { - return handler(this[_emittedVerified]) - } - - return super.on(ev, handler) - } - - emit (ev, data) { - if (ev === 'end') { - this[_onEnd]() - } - return super.emit(ev, data) - } - - write (data) { - this.size += data.length - this.hashes.forEach(h => h.update(data)) - return super.write(data) - } - - [_onEnd] () { - if (!this.goodSri) { - this[_getOptions]() - } - const newSri = parse(this.hashes.map((h, i) => { - return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` - }).join(' '), this.opts) - // Integrity verification mode - const match = this.goodSri && newSri.match(this.sri, this.opts) - if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { - /* eslint-disable-next-line max-len */ - const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) - err.code = 'EBADSIZE' - err.found = this.size - err.expected = this.expectedSize - err.sri = this.sri - this.emit('error', err) - } else if (this.sri && !match) { - /* eslint-disable-next-line max-len */ - const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = this.digests - err.algorithm = this.algorithm - err.sri = this.sri - this.emit('error', err) - } else { - this[_emittedSize] = this.size - this.emit('size', this.size) - this[_emittedIntegrity] = newSri - this.emit('integrity', newSri) - if (match) { - this[_emittedVerified] = match - this.emit('verified', match) - } - } - } -} - -class Hash { - get isHash () { - return true - } - - constructor (hash, opts) { - opts = ssriOpts(opts) - const strict = !!opts.strict - this.source = hash.trim() - - // set default values so that we make V8 happy to - // always see a familiar object template. - this.digest = '' - this.algorithm = '' - this.options = [] - - // 3.1. Integrity metadata (called "Hash" by ssri) - // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description - const match = this.source.match( - strict - ? STRICT_SRI_REGEX - : SRI_REGEX - ) - if (!match) { - return - } - if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { - return - } - this.algorithm = match[1] - this.digest = match[2] - - const rawOpts = match[3] - if (rawOpts) { - this.options = rawOpts.slice(1).split('?') - } - } - - hexDigest () { - return this.digest && Buffer.from(this.digest, 'base64').toString('hex') - } - - toJSON () { - return this.toString() - } - - toString (opts) { - opts = ssriOpts(opts) - if (opts.strict) { - // Strict mode enforces the standard as close to the foot of the - // letter as it can. - if (!( - // The spec has very restricted productions for algorithms. - // https://www.w3.org/TR/CSP2/#source-list-syntax - SPEC_ALGORITHMS.some(x => x === this.algorithm) && - // Usually, if someone insists on using a "different" base64, we - // leave it as-is, since there's multiple standards, and the - // specified is not a URL-safe variant. 
- // https://www.w3.org/TR/CSP2/#base64_value - this.digest.match(BASE64_REGEX) && - // Option syntax is strictly visual chars. - // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression - // https://tools.ietf.org/html/rfc5234#appendix-B.1 - this.options.every(opt => opt.match(VCHAR_REGEX)) - )) { - return '' - } - } - const options = this.options && this.options.length - ? `?${this.options.join('?')}` - : '' - return `${this.algorithm}-${this.digest}${options}` - } -} - -class Integrity { - get isIntegrity () { - return true - } - - toJSON () { - return this.toString() - } - - isEmpty () { - return Object.keys(this).length === 0 - } - - toString (opts) { - opts = ssriOpts(opts) - let sep = opts.sep || ' ' - if (opts.strict) { - // Entries must be separated by whitespace, according to spec. - sep = sep.replace(/\S+/g, ' ') - } - return Object.keys(this).map(k => { - return this[k].map(hash => { - return Hash.prototype.toString.call(hash, opts) - }).filter(x => x.length).join(sep) - }).filter(x => x.length).join(sep) - } - - concat (integrity, opts) { - opts = ssriOpts(opts) - const other = typeof integrity === 'string' - ? integrity - : stringify(integrity, opts) - return parse(`${this.toString(opts)} ${other}`, opts) - } - - hexDigest () { - return parse(this, { single: true }).hexDigest() - } - - // add additional hashes to an integrity value, but prevent - // *changing* an existing integrity hash. - merge (integrity, opts) { - opts = ssriOpts(opts) - const other = parse(integrity, opts) - for (const algo in other) { - if (this[algo]) { - if (!this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest))) { - throw new Error('hashes do not match, cannot update integrity') - } - } else { - this[algo] = other[algo] - } - } - } - - match (integrity, opts) { - opts = ssriOpts(opts) - const other = parse(integrity, opts) - const algo = other.pickAlgorithm(opts) - return ( - this[algo] && - other[algo] && - this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest - ) - ) - ) || false - } - - pickAlgorithm (opts) { - opts = ssriOpts(opts) - const pickAlgorithm = opts.pickAlgorithm - const keys = Object.keys(this) - return keys.reduce((acc, algo) => { - return pickAlgorithm(acc, algo) || acc - }) - } -} - -module.exports.parse = parse -function parse (sri, opts) { - if (!sri) { - return null - } - opts = ssriOpts(opts) - if (typeof sri === 'string') { - return _parse(sri, opts) - } else if (sri.algorithm && sri.digest) { - const fullSri = new Integrity() - fullSri[sri.algorithm] = [sri] - return _parse(stringify(fullSri, opts), opts) - } else { - return _parse(stringify(sri, opts), opts) - } -} - -function _parse (integrity, opts) { - // 3.4.3. Parse metadata - // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata - if (opts.single) { - return new Hash(integrity, opts) - } - const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { - const hash = new Hash(string, opts) - if (hash.algorithm && hash.digest) { - const algo = hash.algorithm - if (!acc[algo]) { - acc[algo] = [] - } - acc[algo].push(hash) - } - return acc - }, new Integrity()) - return hashes.isEmpty() ? 
null : hashes -} - -module.exports.stringify = stringify -function stringify (obj, opts) { - opts = ssriOpts(opts) - if (obj.algorithm && obj.digest) { - return Hash.prototype.toString.call(obj, opts) - } else if (typeof obj === 'string') { - return stringify(parse(obj, opts), opts) - } else { - return Integrity.prototype.toString.call(obj, opts) - } -} - -module.exports.fromHex = fromHex -function fromHex (hexDigest, algorithm, opts) { - opts = ssriOpts(opts) - const optString = getOptString(opts.options) - return parse( - `${algorithm}-${ - Buffer.from(hexDigest, 'hex').toString('base64') - }${optString}`, opts - ) -} - -module.exports.fromData = fromData -function fromData (data, opts) { - opts = ssriOpts(opts) - const algorithms = opts.algorithms - const optString = getOptString(opts.options) - return algorithms.reduce((acc, algo) => { - const digest = crypto.createHash(algo).update(data).digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the string we - * just calculated with an algo did not have an algo or digest. - */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) -} - -module.exports.fromStream = fromStream -function fromStream (stream, opts) { - opts = ssriOpts(opts) - const istream = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(istream) - stream.on('error', reject) - istream.on('error', reject) - let sri - istream.on('integrity', s => { - sri = s - }) - istream.on('end', () => resolve(sri)) - istream.on('data', () => {}) - }) -} - -module.exports.checkData = checkData -function checkData (data, sri, opts) { - opts = ssriOpts(opts) - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - if (opts.error) { - throw Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - ) - } else { - return false - } - } - const algorithm = sri.pickAlgorithm(opts) - const digest = crypto.createHash(algorithm).update(data).digest('base64') - const newSri = parse({ algorithm, digest }) - const match = newSri.match(sri, opts) - if (match || !opts.error) { - return match - } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { - /* eslint-disable-next-line max-len */ - const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) - err.code = 'EBADSIZE' - err.found = data.length - err.expected = opts.size - err.sri = sri - throw err - } else { - /* eslint-disable-next-line max-len */ - const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. 
(${data.length} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = sri - err.algorithm = algorithm - err.sri = sri - throw err - } -} - -module.exports.checkStream = checkStream -function checkStream (stream, sri, opts) { - opts = ssriOpts(opts) - opts.integrity = sri - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - return Promise.reject(Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - )) - } - const checker = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(checker) - stream.on('error', reject) - checker.on('error', reject) - let verified - checker.on('verified', s => { - verified = s - }) - checker.on('end', () => resolve(verified)) - checker.on('data', () => {}) - }) -} - -module.exports.integrityStream = integrityStream -function integrityStream (opts = {}) { - return new IntegrityStream(opts) -} - -module.exports.create = createIntegrity -function createIntegrity (opts) { - opts = ssriOpts(opts) - const algorithms = opts.algorithms - const optString = getOptString(opts.options) - - const hashes = algorithms.map(crypto.createHash) - - return { - update: function (chunk, enc) { - hashes.forEach(h => h.update(chunk, enc)) - return this - }, - digest: function (enc) { - const integrity = algorithms.reduce((acc, algo) => { - const digest = hashes.shift().digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the hash we - * just calculated with an algo did not have an algo or digest. - */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) - - return integrity - }, - } -} - -const NODE_HASHES = new Set(crypto.getHashes()) - -// This is a Best Effort™ at a reasonable priority for hash algos -const DEFAULT_PRIORITY = [ - 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', - // TODO - it's unclear _which_ of these Node will actually use as its name - // for the algorithm, so we guesswork it based on the OpenSSL names. - 'sha3', - 'sha3-256', 'sha3-384', 'sha3-512', - 'sha3_256', 'sha3_384', 'sha3_512', -].filter(algo => NODE_HASHES.has(algo)) - -function getPrioritizedHash (algo1, algo2) { - /* eslint-disable-next-line max-len */ - return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) - ? 
algo1 - : algo2 -} diff --git a/node_modules/node-gyp/node_modules/ssri/package.json b/node_modules/node-gyp/node_modules/ssri/package.json deleted file mode 100644 index 91c1f919788cd..0000000000000 --- a/node_modules/node-gyp/node_modules/ssri/package.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "name": "ssri", - "version": "9.0.1", - "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "prerelease": "npm t", - "postrelease": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "posttest": "npm run lint", - "test": "tap", - "coverage": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "preversion": "npm test", - "postversion": "npm publish", - "snap": "tap" - }, - "tap": { - "check-coverage": true - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/ssri.git" - }, - "keywords": [ - "w3c", - "web", - "security", - "integrity", - "checksum", - "hashing", - "subresource integrity", - "sri", - "sri hash", - "sri string", - "sri generator", - "html" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "minipass": "^3.1.1" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/node-gyp/node_modules/unique-filename/LICENSE b/node_modules/node-gyp/node_modules/unique-filename/LICENSE deleted file mode 100644 index 69619c125ea7e..0000000000000 --- a/node_modules/node-gyp/node_modules/unique-filename/LICENSE +++ /dev/null @@ -1,5 +0,0 @@ -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/unique-filename/lib/index.js b/node_modules/node-gyp/node_modules/unique-filename/lib/index.js deleted file mode 100644 index d067d2e709809..0000000000000 --- a/node_modules/node-gyp/node_modules/unique-filename/lib/index.js +++ /dev/null @@ -1,7 +0,0 @@ -var path = require('path') - -var uniqueSlug = require('unique-slug') - -module.exports = function (filepath, prefix, uniq) { - return path.join(filepath, (prefix ? 
prefix + '-' : '') + uniqueSlug(uniq)) -} diff --git a/node_modules/node-gyp/node_modules/unique-filename/package.json b/node_modules/node-gyp/node_modules/unique-filename/package.json deleted file mode 100644 index bfdec2c3722a0..0000000000000 --- a/node_modules/node-gyp/node_modules/unique-filename/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "unique-filename", - "version": "2.0.1", - "description": "Generate a unique filename for use in temporary directories or caches.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "snap": "tap", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-filename.git" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/iarna/unique-filename/issues" - }, - "homepage": "https://github.com/iarna/unique-filename", - "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.3.0" - }, - "dependencies": { - "unique-slug": "^3.0.0" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/node-gyp/node_modules/unique-slug/LICENSE b/node_modules/node-gyp/node_modules/unique-slug/LICENSE deleted file mode 100644 index 7953647e7760b..0000000000000 --- a/node_modules/node-gyp/node_modules/unique-slug/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
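The unique-filename implementation deleted above is a one-liner over unique-slug (whose deleted source follows below): join a directory, an optional prefix, and an eight-hex-character slug — deterministic MurmurHash3 output when a uniq seed is passed, Math.random()-derived otherwise. A small usage sketch against that deleted API; the seed string and printed paths are illustrative only:

    'use strict'
    const os = require('os')
    const uniqueFilename = require('unique-filename')

    // random slug: prints something like /tmp/npm-3f2c9a1b (varies per call)
    console.log(uniqueFilename(os.tmpdir(), 'npm'))

    // seeded slug: the same uniq string always yields the same path, which
    // lets a cache map a key to a stable temporary file location
    console.log(uniqueFilename(os.tmpdir(), 'npm', 'my-cache-key'))

Because the prefix is optional, uniqueFilename(dir) alone returns dir joined with the bare slug, per the (prefix ? prefix + '-' : '') branch in the deleted source.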
diff --git a/node_modules/node-gyp/node_modules/unique-slug/lib/index.js b/node_modules/node-gyp/node_modules/unique-slug/lib/index.js deleted file mode 100644 index 1bac84d95d730..0000000000000 --- a/node_modules/node-gyp/node_modules/unique-slug/lib/index.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' -var MurmurHash3 = require('imurmurhash') - -module.exports = function (uniq) { - if (uniq) { - var hash = new MurmurHash3(uniq) - return ('00000000' + hash.result().toString(16)).slice(-8) - } else { - return (Math.random().toString(16) + '0000000').slice(2, 10) - } -} diff --git a/node_modules/node-gyp/node_modules/unique-slug/package.json b/node_modules/node-gyp/node_modules/unique-slug/package.json deleted file mode 100644 index 3194408f27fda..0000000000000 --- a/node_modules/node-gyp/node_modules/unique-slug/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "unique-slug", - "version": "3.0.0", - "description": "Generate a unique character string suitible for use in files and URLs.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "snap": "tap", - "posttest": "npm run lint" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-slug.git" - }, - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json index f95ebeadecd70..7e9fb648ab082 100644 --- a/node_modules/node-gyp/package.json +++ b/node_modules/node-gyp/package.json @@ -11,8 +11,8 @@ "bindings", "gyp" ], - "version": "9.3.1", - "installVersion": 9, + "version": "9.4.0", + "installVersion": 11, "author": "Nathan Rajlich (http://tootallnate.net)", "repository": { "type": "git", @@ -23,9 +23,10 @@ "main": "./lib/node-gyp.js", "dependencies": { "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", "glob": "^7.1.4", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^10.0.3", + "make-fetch-happen": "^11.0.3", "nopt": "^6.0.0", "npmlog": "^6.0.0", "rimraf": "^3.0.2", @@ -38,13 +39,13 @@ }, "devDependencies": { "bindings": "^1.5.0", + "mocha": "^10.2.0", "nan": "^2.14.2", "require-inject": "^1.4.4", - "standard": "^14.3.4", - "tap": "^12.7.0" + "standard": "^14.3.4" }, "scripts": { "lint": "standard */*.js test/**/*.js", - "test": "npm run lint && tap --timeout=600 test/test-*" + "test": "npm run lint && mocha --reporter=test/reporter.js test/test-download.js test/test-*" } } diff --git a/node_modules/node-gyp/test/fixtures/VS_2022_Community_workload.txt b/node_modules/node-gyp/test/fixtures/VS_2022_Community_workload.txt new file mode 100644 index 0000000000000..7cd20f8598926 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2022_Community_workload.txt @@ -0,0 +1,569 @@ +[ + { + "path": "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community", + "version": "17.4.33213.308", + "packages": [ + "Microsoft.VisualStudio.Product.Community", + "Microsoft.VisualStudio.PackageGroup.LiveShare.VSCore", + "Microsoft.VisualStudio.LiveShare.VSCore", + "Microsoft.VisualStudio.Workload.NativeDesktop", + "Microsoft.VisualStudio.Component.VC.ASAN", + "Microsoft.VisualCpp.ASAN.X86", + "Microsoft.VC.14.34.17.4.ASAN.X86.base", + "Microsoft.VC.14.34.17.4.ASAN.X64.base", + "Microsoft.VC.14.34.17.4.ASAN.Headers.base", + "Microsoft.VisualStudio.VC.IDE.Project.Factories", + "Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest", + "Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest", + "Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest", + "Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest", + "Microsoft.VisualStudio.Component.VC.CMake.Project", + "Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions.CMake", + "Microsoft.VisualStudio.VC.CMake", + "Microsoft.VisualStudio.VC.CMake.Project", + "Microsoft.VisualStudio.VC.CMake.Client", + "Microsoft.VisualStudio.VC.ExternalBuildFramework", + "Microsoft.VisualStudio.Component.VC.DiagnosticTools", + "Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core", + "Microsoft.VisualStudio.PackageGroup.TestTools.Native", + "Microsoft.VisualStudio.Component.VC.Redist.14.Latest", + "Microsoft.VisualStudio.VC.Templates.UnitTest", + "Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core", + "Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP", + "Microsoft.VisualStudio.VC.Templates.UnitTest.Resources", + "Microsoft.VisualStudio.VC.Templates.Desktop", + "Microsoft.VisualStudio.Component.Graphics", + "Microsoft.VisualStudio.Graphics.Viewers", + "Microsoft.VisualStudio.Graphics.Viewers.Resources", + "Microsoft.VisualStudio.Component.VC.ATL.ARM64", + "Microsoft.VisualCpp.ATL.ARM64", + "Microsoft.VC.14.34.17.4.ATL.ARM64.base", + "Microsoft.VisualStudio.Component.VC.ATL", + "Microsoft.VisualStudio.VC.Ide.ATL", + 
"Microsoft.VisualStudio.VC.Ide.ATL.Resources", + "Microsoft.VisualCpp.ATL.X86", + "Microsoft.VC.14.34.17.4.ATL.X86.base", + "Microsoft.VisualCpp.ATL.X64", + "Microsoft.VC.14.34.17.4.ATL.X64.base", + "Microsoft.VC.14.34.17.4.Props.ATLMFC", + "Microsoft.VisualCpp.ATL.Source", + "Microsoft.VC.14.34.17.4.ATL.Source.base", + "Microsoft.VisualCpp.ATL.Headers", + "Microsoft.VC.14.34.17.4.ATL.Headers.base", + "Microsoft.VC.14.34.17.4.Servicing.ATL", + "Microsoft.VisualStudio.Component.VC.Tools.ARM64", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64", + "Microsoft.VS.VC.vcvars.arm64.Shortcuts", + "Microsoft.VisualCpp.CA.Ext.Hostx64.TargetARM64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.TargetARM64.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx86.TargetARM64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.TargetARM64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.TargetARM64.Res.base", + "Microsoft.VisualCpp.CA.Ext.HostARM64.TargetARM64", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.TargetARM64.Res.base", + "Microsoft.VisualCpp.Tools.Hostx86.Targetarm64", + "Microsoft.VC.14.34.17.4.Tools.Hostx86.Targetarm64.base", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetARM64.Res.base", + "Microsoft.VisualCpp.Tools.HostARM64.TargetARM64", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetARM64.Res.base", + "Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.Redist.ARM64.OneCore.Desktop.base", + "Microsoft.VisualCpp.CRT.Redist.ARM64", + "Microsoft.VC.14.34.17.4.CRT.Redist.ARM64.base", + "Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.ARM64.OneCore.Desktop.base", + "Microsoft.VC.14.34.17.4.CRT.ARM64.OneCore.Desktop.debug.base", + "Microsoft.VisualCpp.CRT.ARM64.Store", + "Microsoft.VC.14.34.17.4.CRT.ARM64.Store.base", + "Microsoft.VisualCpp.CRT.ARM64.Desktop", + "Microsoft.VC.14.34.17.4.CRT.ARM64.Desktop.base", + "Microsoft.VC.14.34.17.4.CRT.ARM64.Desktop.debug.base", + "Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM64", + "Microsoft.VisualCpp.Tools.Core", + "Microsoft.VisualCpp.PGO.ARM64", + "Microsoft.VC.14.34.17.4.PGO.ARM64.base", + "Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm64", + "Microsoft.VC.14.34.17.4.Premium.Tools.Hostx86.Targetarm64.base", + "Microsoft.VC.14.34.17.4.Prem.HostX86.TargetARM64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.Prem.HostX64.TargetARM64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.ARM64.Base", + "Microsoft.VC.14.34.17.4.Premium.Tools.ARM64.Base.base", + "Microsoft.VisualCpp.Tools.HostX64.TargetARM64", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.Props.ARM64", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetARM64.Res.base", + "Microsoft.VisualStudio.Component.VC.Tools.ARM64EC", + "Microsoft.VisualStudio.Component.Windows11SDK.22621", + "Win11SDK_10.0.22621", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64EC.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64EC", + "Microsoft.VisualCpp.CRT.ARM64EC.Store", + "Microsoft.VC.14.34.17.4.CRT.ARM64EC.Store.base", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "Microsoft.VisualCpp.CodeAnalysis.Extensions", + 
"Microsoft.VisualCpp.CA.Ext.HostARM64.Targetx64", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx64.Res.base", + "Microsoft.VisualCpp.CA.Ext.HostARM64.Targetx86", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx86.base", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx86.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx86.Targetx64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx64.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx86.Targetx86", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx86.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx86.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx64.Targetx64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx64.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx64.Targetx86", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx86.base", + "Microsoft.VC.14.34.17.4.Servicing.CAExtensions", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx86.Res.base", + "Microsoft.VisualCpp.Tools.HostX64.TargetX86", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX86.base", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX86.Res.base", + "Microsoft.VisualCpp.Tools.HostX64.TargetX64", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX64.base", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX64.Res.base", + "Microsoft.VisualCpp.Tools.HostARM64.TargetX86", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetX86.base", + "Microsoft.VisualCpp.RuntimeDebug.14", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetX86.Res.base", + "Microsoft.VisualCpp.Tools.HostARM64.TargetX64", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetX64.base", + "Microsoft.VisualCpp.RuntimeDebug.14.ARM64", + "Microsoft.VisualCpp.Redist.14.Latest", + "Microsoft.VisualCpp.Redist.14.Latest", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.Targetx64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX86.TargetX64.base", + "Microsoft.VC.14.34.17.4.Prem.Hostx86.Targetx64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX86.TargetX86.base", + "Microsoft.VC.14.34.17.4.Prem.HostX86.TargetX86.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostARM64.TargetX86", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostARM64.TargetX86.base", + "Microsoft.VC.14.34.17.4.Prem.HostARM64.TargetX86.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostARM64.TargetX64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostARM64.TargetX64.base", + "Microsoft.VC.14.34.17.4.Prem.HostARM64.Targetx64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX64.TargetX86.base", + "Microsoft.VC.14.34.17.4.Prem.HostX64.TargetX86.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX64.TargetX64.base", + "Microsoft.VC.14.34.17.4.Prem.HostX64.TargetX64.Res.base", + "Microsoft.VisualCpp.PGO.X86", + "Microsoft.VC.14.34.17.4.PGO.X86.base", + "Microsoft.VisualCpp.PGO.X64", + "Microsoft.VC.14.34.17.4.PGO.X64.base", + "Microsoft.VisualCpp.PGO.Headers", + "Microsoft.VC.14.34.17.4.PGO.Headers.base", + "Microsoft.VisualCpp.CRT.x86.Store", + "Microsoft.VC.14.34.17.4.CRT.x86.Store.base", + "Microsoft.VisualCpp.CRT.x86.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x86.OneCore.Desktop.base", + "Microsoft.VisualCpp.CRT.x64.Store", + 
"Microsoft.VC.14.34.17.4.CRT.x64.Store.base", + "Microsoft.VisualCpp.CRT.x64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x64.OneCore.Desktop.base", + "Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.Redist.x86.OneCore.Desktop.base", + "Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.Redist.x64.OneCore.Desktop.base", + "Microsoft.VisualStudio.PackageGroup.VC.Tools.x86", + "Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Res", + "Microsoft.VisualCpp.Tools.HostX86.TargetX64", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetX64.base", + "Microsoft.VC.14.34.17.4.Props.x64", + "Microsoft.VC.14.34.17.4.Tools.Hostx86.Targetx64.Res.base", + "Microsoft.VisualCpp.Tools.HostX86.TargetX86.Res", + "Microsoft.VisualCpp.Tools.HostX86.TargetX86", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetX86.base", + "Microsoft.VC.14.34.17.4.Servicing.Compilers", + "Microsoft.VC.14.34.17.4.Props.x86", + "Microsoft.VC.14.34.17.4.Props", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetX86.Res.base", + "Microsoft.VisualCpp.Tools.Core.Resources", + "Microsoft.VisualCpp.Tools.Core.x86", + "Microsoft.VC.14.34.17.4.Tools.Core.Props", + "Microsoft.VisualCpp.DIA.SDK", + "Microsoft.VisualCpp.Servicing.DIASDK", + "Microsoft.VisualCpp.CRT.x86.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x86.Desktop.base", + "Microsoft.VisualCpp.CRT.x64.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x64.Desktop.base", + "Microsoft.VisualCpp.CRT.Source", + "Microsoft.VC.14.34.17.4.CRT.Source.base", + "Microsoft.VisualCpp.CRT.Redist.X86", + "Microsoft.VC.14.34.17.4.CRT.Redist.X86.base", + "Microsoft.VisualCpp.CRT.Redist.X64", + "Microsoft.VisualCpp.CRT.Redist.Resources", + "Microsoft.VC.14.34.17.4.CRT.Redist.X64.base", + "Microsoft.VisualCpp.CRT.Headers", + "Microsoft.VC.14.34.17.4.CRT.Headers.base", + "Microsoft.VC.14.34.17.4.Servicing.CrtHeaders", + "Microsoft.VC.14.34.17.4.Servicing", + "Microsoft.VisualStudio.Component.VC.CoreIde", + "Microsoft.VisualStudio.VC.Ide.Pro", + "Microsoft.VisualStudio.VC.Ide.Pro.Resources", + "Microsoft.VisualStudio.VC.Templates.General", + "Microsoft.VisualStudio.VC.Templates.General.Resources", + "Microsoft.VisualStudio.VC.Items.Pro", + "Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced", + "Microsoft.VisualStudio.VC.Ide.x64", + "Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express", + "Microsoft.VisualStudio.VC.vcvars", + "Microsoft.VS.VC.vcvars.x86.Shortcuts", + "Microsoft.VS.VC.vcvars.x64.Shortcuts", + "Microsoft.VS.VC.vcvars.arm64_x64.Shortcuts", + "Microsoft.VisualStudio.VC.MSBuild.v170.X64.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.X64", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM", + "Microsoft.VisualStudio.VC.MSBuild.v170.x86.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.X86", + "Microsoft.VisualStudio.VC.MSBuild.v170.Base", + "Microsoft.VisualStudio.VC.MSBuild.v170.Base.Resources", + "Microsoft.VisualStudio.VC.Ide.WinXPlus", + "Microsoft.VisualStudio.VC.Ide.Dskx", + "Microsoft.VisualStudio.VC.Ide.Dskx.Resources", + "Microsoft.VisualStudio.VC.Ide.Base", + "Microsoft.VisualStudio.VC.Ide.LanguageService", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.Scripts", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.PythonDistro", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.10", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.9", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.8", + 
"Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.7", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.6", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.5", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.4", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.3", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.2", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.1", + "Microsoft.VisualStudio.VC.Ide.VCPkgDatabase", + "Microsoft.VisualStudio.VC.Ide.Core", + "Microsoft.VisualStudio.VC.Ide.ProjectSystem", + "Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources", + "Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine", + "Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources", + "Microsoft.VisualStudio.VC.Ide.LanguageService.Resources", + "Microsoft.VisualStudio.VC.Llvm.Base", + "Microsoft.VisualStudio.VC.Ide.Base.Resources", + "Microsoft.Net.PackageGroup.4.8.1.Redist", + "Microsoft.VisualStudio.Component.IntelliCode", + "Microsoft.VisualStudio.IntelliCode.CSharp", + "Microsoft.VisualStudio.IntelliCode", + "Component.Microsoft.VisualStudio.LiveShare.2022", + "Microsoft.VisualStudio.Component.Debugger.JustInTime", + "Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi", + "Microsoft.VisualStudio.Debugger.JustInTime", + "Microsoft.VisualStudio.Debugger.JustInTime.Msi", + "Microsoft.VisualStudio.LiveShare.2022", + "Microsoft.Icecap.Analysis", + "Microsoft.Icecap.Analysis.Resources", + "Microsoft.Icecap.Analysis.Resources.Targeted", + "Microsoft.Icecap.Collection.Msi", + "Microsoft.Icecap.Collection.Msi.Targeted", + "Microsoft.Icecap.Collection.Msi.Resources", + "Microsoft.Icecap.Collection.Msi.Resources.Targeted", + "Microsoft.DiagnosticsHub.Instrumentation", + "Microsoft.DiagnosticsHub.Instrumentation.Targeted", + "Microsoft.DiagnosticsHub.CpuSampling", + "Microsoft.DiagnosticsHub.CpuSampling.Targeted", + "Microsoft.PackageGroup.DiagnosticsHub.Platform", + "Microsoft.VisualStudio.InstrumentationEngine.ARM64", + "Microsoft.VisualStudio.InstrumentationEngine", + "Microsoft.DiagnosticsHub.Runtime.ExternalDependencies", + "SQLiteCore", + "SQLiteCore.Targeted", + "Microsoft.DiagnosticsHub.Runtime.ExternalDependencies.Targeted", + "Microsoft.DiagnosticsHub.Runtime", + "Microsoft.DiagnosticsHub.Runtime.Targeted", + "Microsoft.DiagnosticsHub.Collection.ExternalDependencies.arm64", + "Microsoft.DiagnosticsHub.Collection", + "Microsoft.DiagnosticsHub.Collection.Service", + "Microsoft.VisualStudio.VC.Ide.MDD", + "Microsoft.VisualStudio.VC.Ide.Linux.ConnectionManager", + "Microsoft.VisualStudio.VisualC.Utilities", + "Microsoft.VisualStudio.VisualC.Utilities.Resources", + "Microsoft.VisualStudio.VC.Ide.Linux.ConnectionManager.Resources", + "Microsoft.VisualStudio.VC.Ide.ResourceEditor", + "Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources", + "Microsoft.VisualStudio.PackageGroup.TestTools.Core", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI", + "Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI", + "Microsoft.VisualStudio.TestTools.Pex.Common", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy", + "Microsoft.VisualStudio.PackageGroup.MinShell.Interop", + "Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi", + "Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips", + 
"Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestSettings", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common", + "Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE", + "Microsoft.VisualStudio.Cache.Service", + "Microsoft.VisualStudio.TestTools.TestWIExtension", + "Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI", + "Microsoft.VisualStudio.TestTools.TestPlatform.IDE", + "Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage", + "Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors", + "Microsoft.VisualStudio.Component.NuGet", + "Microsoft.CredentialProvider", + "Microsoft.VisualStudio.NuGet.Licenses", + "Microsoft.VisualStudio.Component.TextTemplating", + "Microsoft.VisualStudio.TextTemplating.MSBuild", + "Microsoft.VisualStudio.TextTemplating.Integration", + "Microsoft.VisualStudio.TextTemplating.Core", + "Microsoft.VisualStudio.TextTemplating.Integration.Resources", + "Microsoft.VisualCpp.CRT.ClickOnce.Msi", + "Microsoft.VisualStudio.Component.Roslyn.LanguageServices", + "Microsoft.VisualStudio.InteractiveWindow", + "Microsoft.DiaSymReader.Native", + "Microsoft.VisualCpp.Redist.14", + "Microsoft.VisualCpp.Redist.14", + "Microsoft.VisualCpp.Servicing.Redist", + "Microsoft.VisualStudio.PackageGroup.StaticAnalysis", + "Microsoft.VisualStudio.StaticAnalysis.IDE", + "Microsoft.VisualStudio.StaticAnalysis.IDE.Resources", + "Microsoft.VisualStudio.StaticAnalysis.FxCop.Resources", + "Microsoft.VisualStudio.StaticAnalysis.auxil", + "Microsoft.VisualStudio.StaticAnalysis.auxil.Resources", + "Roslyn.VisualStudio.Setup.ServiceHub", + "Microsoft.Component.MSBuild", + "Microsoft.NuGet.Build.Tasks.Setup", + "Microsoft.VisualStudio.Component.Roslyn.Compiler", + "Microsoft.CodeAnalysis.Compilers", + "Microsoft.VisualStudio.Component.JavaScript.TypeScript", + "Microsoft.VisualStudio.JavaScript.ProjectSystem", + "Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions", + "Microsoft.VisualStudio.ProTools", + "sqlsysclrtypes", + "SQLCommon", + "Microsoft.VisualStudio.ProTools.Resources", + "Microsoft.VisualStudio.Web.Scaffolding", + "Microsoft.VisualStudio.WebToolsExtensions", + "Microsoft.VisualStudio.ConnectedServices.Core", + "Microsoft.VisualStudio.WebTools", + "Microsoft.VisualStudio.WebToolsExtensions.MSBuild", + "Microsoft.VisualStudio.WebTools.Resources", + "Microsoft.VisualStudio.WebTools.WSP.FSA", + "Microsoft.VisualStudio.WebTools.WSP.FSA.Resources", + "Microsoft.VisualStudio.PackageGroup.Debugger.Script", + "Microsoft.VisualStudio.Component.TypeScript.TSServer", + "Microsoft.VisualStudio.Package.TypeScript.TSServer", + "Microsoft.VisualStudio.PackageGroup.JavaScript.Language", + "Microsoft.VisualStudio.Package.NodeJs", + "TypeScript.Build", + "TypeScript.LanguageService", + "TypeScript.Tools", + "Microsoft.VisualStudio.PackageGroup.Community", + "Microsoft.VisualStudio.Community.VB.x86", + "Microsoft.VisualStudio.Community.VB.x64", + "Microsoft.VisualStudio.PackageGroup.Core", + "Microsoft.VisualStudio.CodeSense.Community", + "Microsoft.VisualStudio.TestTools.TeamFoundationClient", + "Microsoft.VisualStudio.PackageGroup.Debugger.Core", + 
"Microsoft.VisualStudio.Debugger.BrokeredServices", + "Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost", + "Microsoft.VisualStudio.Debugger.AzureAttach", + "Microsoft.VisualStudio.Web.Azure.Common", + "Microsoft.WebTools.Shared", + "Microsoft.WebTools.DotNet.Core.ItemTemplates", + "Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Replay", + "Microsoft.VisualStudio.VC.Ide.Debugger", + "Microsoft.VisualStudio.VC.Ide.Debugger.Concord", + "Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources", + "Microsoft.VisualStudio.VC.Ide.Debugger.Resources", + "Microsoft.VisualStudio.VC.Ide.Common", + "Microsoft.VisualStudio.VC.Ide.Common.Resources", + "Microsoft.VisualStudio.Debugger.CollectionAgents", + "Microsoft.VisualStudio.Debugger.Parallel", + "Microsoft.VisualStudio.Debugger.Parallel.Resources", + "Microsoft.VisualStudio.Debugger.Managed", + "Microsoft.CodeAnalysis.ExpressionEvaluator", + "Microsoft.CodeAnalysis.VisualStudio.Setup", + "Microsoft.VisualStudio.Debugger.Concord.Managed", + "Microsoft.VisualStudio.Debugger.Concord.Managed.Resources", + "Microsoft.VisualStudio.Debugger.Managed.Resources", + "Microsoft.VisualStudio.Debugger.TargetComposition", + "Microsoft.VisualStudio.Debugger.TargetComposition.Remote.arm64", + "Microsoft.VisualStudio.Debugger.TargetComposition.Remote", + "Microsoft.VisualStudio.Debugger.TargetComposition.Remote", + "Microsoft.VisualStudio.Debugger.Remote", + "Microsoft.VisualStudio.Debugger.Concord.Remote", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources", + "Microsoft.VisualStudio.Debugger.Remote", + "Microsoft.VisualStudio.Debugger.Remote.ARM64", + "Microsoft.VisualStudio.Debugger.Concord.Remote.ARM64", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources.ARM64", + "Microsoft.VisualStudio.Debugger.Remote.ARM", + "Microsoft.VisualStudio.Debugger.Concord.Remote.ARM", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources.ARM", + "Microsoft.VisualStudio.Debugger.Remote.Resources.ARM", + "Microsoft.VisualStudio.Debugger.Remote.Resources.ARM64", + "Microsoft.VisualStudio.Debugger.Concord.Remote", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources", + "Microsoft.VisualStudio.Debugger.Remote.Resources", + "Microsoft.VisualStudio.Debugger.Remote.Resources", + "Microsoft.VisualStudio.Debugger", + "Microsoft.VisualStudio.VC.MSVCDis", + "Microsoft.IntelliTrace.DiagnosticsHub", + "Microsoft.VisualStudio.Debugger.Concord", + "Microsoft.VisualStudio.Debugger.Concord.Resources", + "Microsoft.VisualStudio.Debugger.Resources", + "Microsoft.VisualStudio.Debugger.Package.DiagHub.Client", + "Microsoft.VisualStudio.Debugger.Remote.DiagnosticsHub.Client", + "Microsoft.VisualStudio.Debugger.Remote.DiagnosticsHub.Client", + "Microsoft.VisualStudio.Debugger.Remote.DiagnosticsHub.Client", + "Microsoft.PackageGroup.ClientDiagnostics", + "Microsoft.VisualStudio.AppResponsiveness", + "Microsoft.VisualStudio.AppResponsiveness.Targeted", + "Microsoft.VisualStudio.AppResponsiveness.Resources", + "Microsoft.VisualStudio.ClientDiagnostics", + "Microsoft.VisualStudio.ClientDiagnostics.Targeted", + "Microsoft.VisualStudio.ClientDiagnostics.Resources", + "Microsoft.VisualStudio.PackageGroup.CommunityCore", + "Microsoft.VisualStudio.ProjectSystem.Full", + "Microsoft.VisualStudio.LiveShareApi", + "Microsoft.VisualStudio.ProjectSystem.Query", + "Microsoft.VisualStudio.ProjectSystem", + "Microsoft.VisualStudio.Community.x86", + "Microsoft.VisualStudio.Community.x64", + "Microsoft.VisualStudio.Community.Msi.Resources", + "Microsoft.VisualStudio.Community.Msi", 
+ "Microsoft.VisualStudio.Community.Shared.Msi", + "Microsoft.VisualStudio.Devenv.Msi", + "Microsoft.VisualStudio.Devenv.Shared.Msi", + "Microsoft.VisualStudio.MinShell.Interop.Msi", + "Microsoft.VisualStudio.MinShell.Interop.Shared.Msi", + "Microsoft.VisualStudio.Editors", + "Microsoft.VisualStudio.Workload.CoreEditor", + "Microsoft.VisualStudio.Component.CoreEditor", + "Microsoft.VisualStudio.PackageGroup.CoreEditor", + "Microsoft.WebView2", + "Microsoft.VisualStudio.ScriptedHost", + "Microsoft.VisualStudio.ScriptedHost.Targeted", + "Microsoft.VisualCpp.Tools.Common.UtilsPrereq", + "Microsoft.VisualCpp.Tools.Common.Utils", + "Microsoft.VisualCpp.Tools.Common.Utils.Resources", + "Microsoft.VisualStudio.PackageGroup.VsDevCmd", + "Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk", + "Microsoft.VisualStudio.VsDevCmd.Core.WinSdk", + "Microsoft.VisualStudio.VsDevCmd.Core.DotNet", + "Microsoft.VisualStudio.VC.DevCmd", + "Microsoft.VisualStudio.VC.DevCmd.Resources", + "Microsoft.VisualStudio.VirtualTree", + "Microsoft.DiaSymReader", + "Microsoft.Build.Dependencies", + "Microsoft.Build.FileTracker.Msi", + "Microsoft.Build", + "Microsoft.VisualStudio.PackageGroup.NuGet", + "Microsoft.DataAI.NuGetRecommender", + "Microsoft.VisualStudio.NuGet.Core", + "Microsoft.Build.Arm64", + "Microsoft.Build.UnGAC", + "Microsoft.VisualStudio.TextMateGrammars", + "Microsoft.VisualStudio.Platform.Markdown", + "Microsoft.VisualStudio.Platform.CrossRepositorySearch", + "Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common", + "Microsoft.VisualStudio.TeamExplorer", + "Microsoft.VisualStudio.PackageGroup.ServiceHub", + "Microsoft.ServiceHub.Node", + "Microsoft.ServiceHub.Managed", + "Microsoft.ServiceHub.arm64", + "Microsoft.VisualStudio.ProjectServices", + "Microsoft.VisualStudio.OpenFolder.VSIX", + "Microsoft.VisualStudio.FileHandler.Msi", + "Microsoft.VisualStudio.FileHandler.Msi", + "Microsoft.VisualStudio.PackageGroup.MinShell", + "Microsoft.VisualStudio.MinShell.Msi", + "Microsoft.VisualStudio.MinShell.Shared.Msi", + "Microsoft.VisualStudio.MinShell.Msi.Resources", + "Microsoft.VisualStudio.MinShell.Interop", + "CoreEditorFonts", + "Microsoft.VisualStudio.Log", + "Microsoft.VisualStudio.Log.Targeted", + "Microsoft.VisualStudio.Log.Resources", + "Microsoft.VisualStudio.Finalizer", + "Microsoft.VisualStudio.Devenv", + "Microsoft.VisualStudio.Devenv.Resources", + "Microsoft.VisualStudio.CoreEditor", + "Microsoft.VisualStudio.Navigation.RichCodeNav", + "Microsoft.VisualStudio.Platform.NavigateTo", + "Microsoft.VisualStudio.Connected", + "SQLitePCLRaw", + "SQLitePCLRaw.Targeted", + "Microsoft.VisualStudio.Connected.Auto", + "Microsoft.VisualStudio.Connected.Auto.Resources", + "Microsoft.VisualStudio.AzureSDK", + "Microsoft.VisualStudio.PerfLib", + "Microsoft.VisualStudio.Connected.Resources", + "Microsoft.Net.PackageGroup.4.8.Redist", + "Microsoft.VisualStudio.PackageGroup.Progression", + "Microsoft.VisualStudio.PerformanceProvider", + "Microsoft.VisualStudio.GraphModel", + "Microsoft.VisualStudio.GraphProvider", + "Microsoft.VisualStudio.Community.VB.Targeted", + "Microsoft.VisualStudio.Community.VB.Neutral", + "Microsoft.VisualStudio.Community.CSharp.Targeted", + "Microsoft.VisualStudio.Community.CSharp.Neutral", + "Microsoft.VisualStudio.Community.ProductArch.TargetedExtra", + "Microsoft.VisualStudio.Community.ProductArch.Targeted", + "Microsoft.VisualStudio.Community.ProductArch.NeutralExtra", + "Microsoft.DiaSymReader.PortablePdb", + "Microsoft.IntelliTrace.CollectorCab", + 
"Microsoft.VisualStudio.Community.VB.Resources.Targeted", + "Microsoft.VisualStudio.Community.VB.Resources.Neutral", + "Microsoft.VisualStudio.Community.CSharp.Resources.Targeted", + "Microsoft.VisualStudio.Community.CSharp.Resources.Neutral", + "Microsoft.VisualStudio.Community.ProductArch.Resources.Targeted", + "Microsoft.VisualStudio.Community.ProductArch.Resources.NeutralExtra", + "Microsoft.VisualStudio.Net.Eula.Resources", + "Microsoft.VisualStudio.Community.ProductArch.Resources.Neutral", + "Microsoft.VisualStudio.WebSiteProject.DTE", + "Microsoft.VisualStudio.Diagnostics.AspNetHelper", + "Microsoft.VisualStudio.Diagnostics.AspNetHelper.Standard", + "Microsoft.MSHtml", + "Microsoft.VisualStudio.Platform.CallHierarchy", + "Microsoft.VisualStudio.Community.ProductArch.Neutral", + "Microsoft.VisualStudio.MinShell", + "Microsoft.VisualStudio.VsWebProtocolSelector.Msi", + "Microsoft.Net.6.WindowsDesktop.Runtime", + "Microsoft.Net.6.Runtime", + "Microsoft.VisualStudio.PackageGroup.Setup.Common", + "Microsoft.VisualStudio.Setup.WMIProvider", + "Microsoft.VisualStudio.Setup.Configuration.Interop", + "Microsoft.VisualStudio.Setup.Configuration", + "Microsoft.VisualStudio.Extensibility.Container", + "Microsoft.VisualStudio.LanguageServer", + "Microsoft.VisualStudio.Platform.Terminal", + "Microsoft.VisualStudio.MefHosting", + "Microsoft.VisualStudio.Initializer", + "Microsoft.VisualStudio.ExtensionManager", + "Microsoft.VisualStudio.Platform.Editor", + "Microsoft.VisualStudio.MinShell.Targeted", + "Microsoft.VisualStudio.NativeImageSupport", + "Microsoft.VisualStudio.Devenv.Config", + "Microsoft.VisualStudio.MinShell.Resources.arm64", + "Microsoft.VisualStudio.MinShell.Auto", + "Microsoft.VisualStudio.MinShell.Auto.Resources", + "Microsoft.VisualStudio.Branding.Community" + ] + } +] diff --git a/node_modules/node-gyp/test/reporter.js b/node_modules/node-gyp/test/reporter.js new file mode 100644 index 0000000000000..9964b1b5d504c --- /dev/null +++ b/node_modules/node-gyp/test/reporter.js @@ -0,0 +1,75 @@ +const Mocha = require('mocha') + +class Reporter { + constructor (runner) { + this.failedTests = [] + + runner.on(Mocha.Runner.constants.EVENT_RUN_BEGIN, () => { + console.log('Starting tests') + }) + + runner.on(Mocha.Runner.constants.EVENT_RUN_END, () => { + console.log('Tests finished') + console.log() + console.log('****************') + console.log('* TESTS REPORT *') + console.log('****************') + console.log() + console.log(`Executed ${runner.stats.suites} suites with ${runner.stats.tests} tests in ${runner.stats.duration} ms`) + console.log(` Passed: ${runner.stats.passes}`) + console.log(` Skipped: ${runner.stats.pending}`) + console.log(` Failed: ${runner.stats.failures}`) + if (this.failedTests.length > 0) { + console.log() + console.log(' Failed test details') + this.failedTests.forEach((failedTest, index) => { + console.log() + console.log(` ${index + 1}.'${failedTest.test.fullTitle()}'`) + console.log(` Name: ${failedTest.error.name}`) + console.log(` Message: ${failedTest.error.message}`) + console.log(` Code: ${failedTest.error.code}`) + console.log(` Stack: ${failedTest.error.stack}`) + }) + } + console.log() + }) + + runner.on(Mocha.Runner.constants.EVENT_SUITE_BEGIN, (suite) => { + if (suite.root) { + return + } + console.log(`Starting suite '${suite.title}'`) + }) + + runner.on(Mocha.Runner.constants.EVENT_SUITE_END, (suite) => { + if (suite.root) { + return + } + console.log(`Suite '${suite.title}' finished`) + console.log() + }) + + 
runner.on(Mocha.Runner.constants.EVENT_TEST_BEGIN, (test) => { + console.log(`Starting test '${test.title}'`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_PASS, (test) => { + console.log(`Test '${test.title}' passed in ${test.duration} ms`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_PENDING, (test) => { + console.log(`Test '${test.title}' skipped in ${test.duration} ms`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_FAIL, (test, error) => { + this.failedTests.push({ test, error }) + console.log(`Test '${test.title}' failed in ${test.duration} ms with ${error}`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_END, (test) => { + console.log() + }) + } +} + +module.exports = Reporter diff --git a/node_modules/node-gyp/test/test-addon.js b/node_modules/node-gyp/test/test-addon.js index f79eff73c169e..43556620a85ab 100644 --- a/node_modules/node-gyp/test/test-addon.js +++ b/node_modules/node-gyp/test/test-addon.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const fs = require('graceful-fs') const childProcess = require('child_process') @@ -35,116 +36,117 @@ function checkCharmapValid () { return lines.pop() === 'True' } -test('build simple addon', function (t) { - t.plan(3) - - // Set the loglevel otherwise the output disappears when run via 'npm test' - var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello().trim(), 'world') +describe('addon', function () { + this.timeout(300000) + + it('build simple addon', function (done) { + // Set the loglevel otherwise the output disappears when run via 'npm test' + var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + assert.strictEqual(err, null) + assert.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + assert.strictEqual(runHello().trim(), 'world') + done() + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') -}) - -test('build simple addon in path with non-ascii characters', function (t) { - t.plan(1) - if (!checkCharmapValid()) { - return t.skip('python console app can\'t encode non-ascii character.') - } + it('build simple addon in path with non-ascii characters', function (done) { + if (!checkCharmapValid()) { + return this.skip('python console app can\'t encode non-ascii character.') + } - var testDirNames = { - cp936: '文件夹', - cp1252: 'Latīna', - cp932: 'フォルダ' - } - // Select non-ascii characters by current encoding - var testDirName = testDirNames[getEncoding()] - // If encoding is UTF-8 or other then no need to test - if (!testDirName) { - return t.skip('no need to test') - } + var testDirNames = { + cp936: '文件夹', + cp1252: 'Latīna', + cp932: 'フォルダ' + } + // Select non-ascii characters by current encoding + var testDirName = testDirNames[getEncoding()] + // If encoding is UTF-8 or other then no need to test + if (!testDirName) { + return this.skip('no need to test') + } - t.plan(3) + 
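[Editor's note on the this.timeout(300000) calls introduced throughout these converted tests, the first of which follows immediately below: Mocha aborts any test that exceeds its default timeout of 2000 ms, and rebuilding a native addon routinely takes far longer. The timeout can be raised per suite or per test, but only from a regular function, because Mocha passes the test context as `this` and an arrow function would not receive it. A minimal illustration, hypothetical and not taken from the patch:

const { describe, it } = require('mocha')

describe('slow native builds', function () {
  // Suite-level override: applies to every test in this block.
  this.timeout(300000)

  it('finishes despite being slow', function (done) {
    // A per-test override works too; both require non-arrow functions
    // so that `this` is Mocha's test context.
    this.timeout(300000)
    setTimeout(done, 5000)
  })
})
]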
this.timeout(300000) - var data - var configPath = path.join(addonPath, 'build', 'config.gypi') - try { - data = fs.readFileSync(configPath, 'utf8') - } catch (err) { - t.error(err) - return - } - var config = JSON.parse(data.replace(/#.+\n/, '')) - var nodeDir = config.variables.nodedir - var testNodeDir = path.join(addonPath, testDirName) - // Create symbol link to path with non-ascii characters - try { - fs.symlinkSync(nodeDir, testNodeDir, 'dir') - } catch (err) { - switch (err.code) { - case 'EEXIST': break - case 'EPERM': - t.error(err, 'Please try to running console as an administrator') - return - default: - t.error(err) - return + var data + var configPath = path.join(addonPath, 'build', 'config.gypi') + try { + data = fs.readFileSync(configPath, 'utf8') + } catch (err) { + assert.fail(err) + return } - } - - var cmd = [ - nodeGyp, - 'rebuild', - '-C', - addonPath, - '--loglevel=verbose', - '-nodedir=' + testNodeDir - ] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var config = JSON.parse(data.replace(/#.+\n/, '')) + var nodeDir = config.variables.nodedir + var testNodeDir = path.join(addonPath, testDirName) + // Create symbol link to path with non-ascii characters try { - fs.unlink(testNodeDir) + fs.symlinkSync(nodeDir, testNodeDir, 'dir') } catch (err) { - t.error(err) + switch (err.code) { + case 'EEXIST': break + case 'EPERM': + assert.fail(err, null, 'Please try to running console as an administrator') + return + default: + assert.fail(err) + return + } } - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello().trim(), 'world') + var cmd = [ + nodeGyp, + 'rebuild', + '-C', + addonPath, + '--loglevel=verbose', + '-nodedir=' + testNodeDir + ] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + try { + fs.unlink(testNodeDir) + } catch (err) { + assert.fail(err) + } + + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + assert.strictEqual(err, null) + assert.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + assert.strictEqual(runHello().trim(), 'world') + done() + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') -}) - -test('addon works with renamed host executable', function (t) { - // No `fs.copyFileSync` before node8. - if (process.version.substr(1).split('.')[0] < 8) { - t.skip('skipping test for old node version') - t.end() - return - } - t.plan(3) - - var notNodePath = path.join(os.tmpdir(), 'notnode' + path.extname(process.execPath)) - fs.copyFileSync(process.execPath, notNodePath) + it('addon works with renamed host executable', function (done) { + // No `fs.copyFileSync` before node8. 
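[Editor's note: the edits in these test files all follow the same mechanical conversion, which continues below. tap's test(cb) with t.plan(n) — which ends a test automatically once n assertions have run — becomes a describe/it pair where assertions come from Node's built-in assert module, asynchronous tests signal completion by calling done() explicitly, t.skip(...) becomes this.skip(), and t.tearDown(...) becomes after(...). A hypothetical before/after, using a made-up `add` helper purely for illustration:

// tap (before):
//   test('adds', function (t) {
//     t.plan(2)
//     add(1, 1, function (err, sum) {
//       t.strictEqual(err, null)
//       t.strictEqual(sum, 2)
//     })
//   })

// mocha (after):
const { describe, it } = require('mocha')
const assert = require('assert')

// Stand-in async helper so the sketch is self-contained.
function add (a, b, cb) { setImmediate(() => cb(null, a + b)) }

describe('adds', function () {
  it('adds two numbers', function (done) {
    add(1, 1, function (err, sum) {
      assert.strictEqual(err, null)
      assert.strictEqual(sum, 2)
      done() // mocha needs an explicit done() for callback-style tests
    })
  })
})
]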
+ if (process.version.substr(1).split('.')[0] < 8) { + return this.skip('skipping test for old node version') + } - var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello(notNodePath).trim(), 'world') - fs.unlinkSync(notNodePath) + this.timeout(300000) + + var notNodePath = path.join(os.tmpdir(), 'notnode' + path.extname(process.execPath)) + fs.copyFileSync(process.execPath, notNodePath) + + var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + assert.strictEqual(err, null) + assert.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + assert.strictEqual(runHello(notNodePath).trim(), 'world') + fs.unlinkSync(notNodePath) + done() + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') }) diff --git a/node_modules/node-gyp/test/test-configure-python.js b/node_modules/node-gyp/test/test-configure-python.js index aacd75f7c7294..ab1e5511fad98 100644 --- a/node_modules/node-gyp/test/test-configure-python.js +++ b/node_modules/node-gyp/test/test-configure-python.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const devDir = require('./common').devDir() const gyp = require('../lib/node-gyp') @@ -22,63 +23,59 @@ const configure = requireInject('../lib/configure', { const EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib') const SEPARATOR = process.platform === 'win32' ? 
';' : ':' -const SPAWN_RESULT = { on: function () { } } +const SPAWN_RESULT = cb => ({ on: function () { cb() } }) require('npmlog').level = 'warn' -test('configure PYTHONPATH with no existing env', function (t) { - t.plan(1) - - delete process.env.PYTHONPATH - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, EXPECTED_PYPATH) - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) -}) - -test('configure PYTHONPATH with existing env of one dir', function (t) { - t.plan(2) - - var existingPath = path.join('a', 'b') - process.env.PYTHONPATH = existingPath - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) - - var dirs = process.env.PYTHONPATH.split(SEPARATOR) - t.deepEqual(dirs, [EXPECTED_PYPATH, existingPath]) - - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) -}) - -test('configure PYTHONPATH with existing env of multiple dirs', function (t) { - t.plan(2) - - var pythonDir1 = path.join('a', 'b') - var pythonDir2 = path.join('b', 'c') - var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR) - process.env.PYTHONPATH = existingPath - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) - - var dirs = process.env.PYTHONPATH.split(SEPARATOR) - t.deepEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2]) - - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) +describe('configure-python', function () { + it('configure PYTHONPATH with no existing env', function (done) { + delete process.env.PYTHONPATH + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + assert.strictEqual(process.env.PYTHONPATH, EXPECTED_PYPATH) + return SPAWN_RESULT(done) + } + prog.devDir = devDir + configure(prog, [], assert.fail) + }) + + it('configure PYTHONPATH with existing env of one dir', function (done) { + var existingPath = path.join('a', 'b') + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + assert.strictEqual(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + assert.deepStrictEqual(dirs, [EXPECTED_PYPATH, existingPath]) + + return SPAWN_RESULT(done) + } + prog.devDir = devDir + configure(prog, [], assert.fail) + }) + + it('configure PYTHONPATH with existing env of multiple dirs', function (done) { + var pythonDir1 = path.join('a', 'b') + var pythonDir2 = path.join('b', 'c') + var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR) + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + assert.strictEqual(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + assert.deepStrictEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2]) + + return SPAWN_RESULT(done) + } + prog.devDir = devDir + configure(prog, [], assert.fail) + }) }) diff --git a/node_modules/node-gyp/test/test-create-config-gypi.js b/node_modules/node-gyp/test/test-create-config-gypi.js index eeac73fab1dcc..725819b6aa102 100644 --- a/node_modules/node-gyp/test/test-create-config-gypi.js +++ b/node_modules/node-gyp/test/test-create-config-gypi.js @@ -1,70 +1,61 @@ 'use strict' const path = require('path') -const { test } = 
require('tap') +const { describe, it } = require('mocha') +const assert = require('assert') const gyp = require('../lib/node-gyp') const createConfigGypi = require('../lib/create-config-gypi') const { parseConfigGypi, getCurrentConfigGypi } = createConfigGypi.test -test('config.gypi with no options', async function (t) { - t.plan(2) +describe('create-config-gypi', function () { + it('config.gypi with no options', async function () { + const prog = gyp() + prog.parseArgv([]) - const prog = gyp() - prog.parseArgv([]) + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + assert.strictEqual(config.target_defaults.default_configuration, 'Release') + assert.strictEqual(config.variables.target_arch, process.arch) + }) - const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) - t.equal(config.target_defaults.default_configuration, 'Release') - t.equal(config.variables.target_arch, process.arch) -}) - -test('config.gypi with --debug', async function (t) { - t.plan(1) - - const prog = gyp() - prog.parseArgv(['_', '_', '--debug']) + it('config.gypi with --debug', async function () { + const prog = gyp() + prog.parseArgv(['_', '_', '--debug']) - const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) - t.equal(config.target_defaults.default_configuration, 'Debug') -}) + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + assert.strictEqual(config.target_defaults.default_configuration, 'Debug') + }) -test('config.gypi with custom options', async function (t) { - t.plan(1) + it('config.gypi with custom options', async function () { + const prog = gyp() + prog.parseArgv(['_', '_', '--shared-libxml2']) - const prog = gyp() - prog.parseArgv(['_', '_', '--shared-libxml2']) + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + assert.strictEqual(config.variables.shared_libxml2, true) + }) - const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) - t.equal(config.variables.shared_libxml2, true) -}) + it('config.gypi with nodedir', async function () { + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') -test('config.gypi with nodedir', async function (t) { - t.plan(1) + const prog = gyp() + prog.parseArgv(['_', '_', `--nodedir=${nodeDir}`]) - const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + assert.strictEqual(config.variables.build_with_electron, true) + }) - const prog = gyp() - prog.parseArgv(['_', '_', `--nodedir=${nodeDir}`]) + it('config.gypi with --force-process-config', async function () { + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') - const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) - t.equal(config.variables.build_with_electron, true) -}) - -test('config.gypi with --force-process-config', async function (t) { - t.plan(1) - - const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') - - const prog = gyp() - prog.parseArgv(['_', '_', '--force-process-config', `--nodedir=${nodeDir}`]) - - const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) - t.equal(config.variables.build_with_electron, undefined) -}) + const prog = gyp() + prog.parseArgv(['_', '_', '--force-process-config', `--nodedir=${nodeDir}`]) -test('config.gypi parsing', function (t) { - t.plan(1) + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + assert.strictEqual(config.variables.build_with_electron, undefined) + }) - const str = "# Some 
comments\n{'variables': {'multiline': 'A'\n'B'}}" - const config = parseConfigGypi(str) - t.deepEqual(config, { variables: { multiline: 'AB' } }) + it('config.gypi parsing', function () { + const str = "# Some comments\n{'variables': {'multiline': 'A'\n'B'}}" + const config = parseConfigGypi(str) + assert.deepStrictEqual(config, { variables: { multiline: 'AB' } }) + }) }) diff --git a/node_modules/node-gyp/test/test-download.js b/node_modules/node-gyp/test/test-download.js index c4caad9e8346d..1dd5a51b062c0 100644 --- a/node_modules/node-gyp/test/test-download.js +++ b/node_modules/node-gyp/test/test-download.js @@ -1,6 +1,7 @@ 'use strict' -const { test } = require('tap') +const { describe, it, after } = require('mocha') +const assert = require('assert') const fs = require('fs') const path = require('path') const util = require('util') @@ -16,202 +17,194 @@ const certs = require('./fixtures/certs') log.level = 'warn' -test('download over http', async (t) => { - t.plan(2) - - const server = http.createServer((req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('ok') - }) - - t.tearDown(() => new Promise((resolve) => server.close(resolve))) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - const gyp = { - opts: {}, - version: '42' - } - const url = `http://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'ok') -}) - -test('download over https with custom ca', async (t) => { - t.plan(3) - - const cafile = path.join(__dirname, 'fixtures/ca.crt') - const cacontents = certs['ca.crt'] - const cert = certs['server.crt'] - const key = certs['server.key'] - await fs.promises.writeFile(cafile, cacontents, 'utf8') - const ca = await install.test.readCAFile(cafile) - - t.strictEqual(ca.length, 1) - - const options = { ca: ca, cert: cert, key: key } - const server = https.createServer(options, (req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('ok') +describe('download', function () { + it('download over http', async function () { + const server = http.createServer((req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + after(() => new Promise((resolve) => server.close(resolve))) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: {}, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'ok') }) - t.tearDown(async () => { - await new Promise((resolve) => server.close(resolve)) - await fs.promises.unlink(cafile) + it('download over https with custom ca', async function () { + const cafile = path.join(__dirname, 'fixtures/ca.crt') + const cacontents = certs['ca.crt'] + const cert = certs['server.crt'] + const key = certs['server.key'] + await fs.promises.writeFile(cafile, cacontents, 'utf8') + const ca = await install.test.readCAFile(cafile) + + assert.strictEqual(ca.length, 1) + + const options = { ca: ca, cert: cert, key: key } + const server = https.createServer(options, (req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + after(async () => { + await new Promise((resolve) => server.close(resolve)) + await 
fs.promises.unlink(cafile) + }) + + server.on('clientError', (err) => { throw err }) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: { cafile }, + version: '42' + } + const url = `https://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'ok') }) - server.on('clientError', (err) => { throw err }) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - const gyp = { - opts: { cafile }, - version: '42' - } - const url = `https://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'ok') -}) - -test('download over http with proxy', async (t) => { - t.plan(2) - - const server = http.createServer((_, res) => { - res.end('ok') + it('download over http with proxy', async function () { + const server = http.createServer((_, res) => { + res.end('ok') + }) + + const pserver = http.createServer((req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('proxy ok') + }) + + after(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: 'bad' + }, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'proxy ok') }) - const pserver = http.createServer((req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('proxy ok') + it('download over http with noproxy', async function () { + const server = http.createServer((req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + const pserver = http.createServer((_, res) => { + res.end('proxy ok') + }) + + after(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: host + }, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'ok') }) - t.tearDown(() => Promise.all([ - new Promise((resolve) => server.close(resolve)), - new Promise((resolve) => pserver.close(resolve)) - ])) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) - const gyp = { - opts: { - proxy: `http://${host}:${port + 1}`, - noproxy: 'bad' - }, - version: '42' - } - const url = `http://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'proxy ok') -}) - -test('download over http with noproxy', async (t) => { - t.plan(2) - - const server = 
http.createServer((req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('ok') + it('download with missing cafile', async function () { + const gyp = { + opts: { cafile: 'no.such.file' } + } + try { + await install.test.download(gyp, {}, 'http://bad/') + } catch (e) { + assert.ok(/no.such.file/.test(e.message)) + } }) - const pserver = http.createServer((_, res) => { - res.end('proxy ok') + it('check certificate splitting', async function () { + const cafile = path.join(__dirname, 'fixtures/ca-bundle.crt') + const cacontents = certs['ca-bundle.crt'] + await fs.promises.writeFile(cafile, cacontents, 'utf8') + after(async () => { + await fs.promises.unlink(cafile) + }) + const cas = await install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) + assert.strictEqual(cas.length, 2) + assert.notStrictEqual(cas[0], cas[1]) }) - t.tearDown(() => Promise.all([ - new Promise((resolve) => server.close(resolve)), - new Promise((resolve) => pserver.close(resolve)) - ])) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) - const gyp = { - opts: { - proxy: `http://${host}:${port + 1}`, - noproxy: host - }, - version: '42' - } - const url = `http://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'ok') -}) - -test('download with missing cafile', async (t) => { - t.plan(1) - const gyp = { - opts: { cafile: 'no.such.file' } - } - try { - await install.test.download(gyp, {}, 'http://bad/') - } catch (e) { - t.ok(/no.such.file/.test(e.message)) - } -}) - -test('check certificate splitting', async (t) => { - const cafile = path.join(__dirname, 'fixtures/ca-bundle.crt') - const cacontents = certs['ca-bundle.crt'] - await fs.promises.writeFile(cafile, cacontents, 'utf8') - t.tearDown(async () => { - await fs.promises.unlink(cafile) + // only run this test if we are running a version of Node with predictable version path behavior + + it('download headers (actual)', async function () { + if (process.env.FAST_TEST || + process.release.name !== 'node' || + semver.prerelease(process.version) !== null || + semver.satisfies(process.version, '<10')) { + return this.skip('Skipping actual download of headers due to test environment configuration') + } + + this.timeout(300000) + + const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) + await util.promisify(rimraf)(expectedDir) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + log.level = 'warn' + await util.promisify(install)(prog, []) + + const data = await fs.promises.readFile(path.join(expectedDir, 'installVersion'), 'utf8') + assert.strictEqual(data, '11\n', 'correct installVersion') + + const list = await fs.promises.readdir(path.join(expectedDir, 'include/node')) + assert.ok(list.includes('common.gypi')) + assert.ok(list.includes('config.gypi')) + assert.ok(list.includes('node.h')) + assert.ok(list.includes('node_version.h')) + assert.ok(list.includes('openssl')) + assert.ok(list.includes('uv')) + assert.ok(list.includes('uv.h')) + assert.ok(list.includes('v8-platform.h')) + assert.ok(list.includes('v8.h')) + assert.ok(list.includes('zlib.h')) + + const lines = (await fs.promises.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8')).split('\n') + + // extract the 3 version parts from the defines to build a valid version string and + // 
and check them against our current env version + const version = ['major', 'minor', 'patch'].reduce((version, type) => { + const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) + const line = lines.find((l) => re.test(l)) + const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' + return `${version}${type !== 'major' ? '.' : 'v'}${i}` + }, '') + + assert.strictEqual(version, process.version) }) - const cas = await install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) - t.plan(2) - t.strictEqual(cas.length, 2) - t.notStrictEqual(cas[0], cas[1]) -}) - -// only run this test if we are running a version of Node with predictable version path behavior - -test('download headers (actual)', async (t) => { - if (process.env.FAST_TEST || - process.release.name !== 'node' || - semver.prerelease(process.version) !== null || - semver.satisfies(process.version, '<10')) { - return t.skip('Skipping actual download of headers due to test environment configuration') - } - - t.plan(12) - - const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) - await util.promisify(rimraf)(expectedDir) - - const prog = gyp() - prog.parseArgv([]) - prog.devDir = devDir - log.level = 'warn' - await util.promisify(install)(prog, []) - - const data = await fs.promises.readFile(path.join(expectedDir, 'installVersion'), 'utf8') - t.strictEqual(data, '9\n', 'correct installVersion') - - const list = await fs.promises.readdir(path.join(expectedDir, 'include/node')) - t.ok(list.includes('common.gypi')) - t.ok(list.includes('config.gypi')) - t.ok(list.includes('node.h')) - t.ok(list.includes('node_version.h')) - t.ok(list.includes('openssl')) - t.ok(list.includes('uv')) - t.ok(list.includes('uv.h')) - t.ok(list.includes('v8-platform.h')) - t.ok(list.includes('v8.h')) - t.ok(list.includes('zlib.h')) - - const lines = (await fs.promises.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8')).split('\n') - - // extract the 3 version parts from the defines to build a valid version string and - // and check them against our current env version - const version = ['major', 'minor', 'patch'].reduce((version, type) => { - const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) - const line = lines.find((l) => re.test(l)) - const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' - return `${version}${type !== 'major' ? '.' 
: 'v'}${i}` - }, '') - - t.strictEqual(version, process.version) }) diff --git a/node_modules/node-gyp/test/test-find-accessible-sync.js b/node_modules/node-gyp/test/test-find-accessible-sync.js index 0a2e584c4fb33..7edbc0c76446e 100644 --- a/node_modules/node-gyp/test/test-find-accessible-sync.js +++ b/node_modules/node-gyp/test/test-find-accessible-sync.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const requireInject = require('require-inject') const configure = requireInject('../lib/configure', { @@ -27,58 +28,46 @@ const readableFiles = [ path.resolve(dir, readableFileInDir) ] -test('find accessible - empty array', function (t) { - t.plan(1) - - var candidates = [] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - single item array, readable', function (t) { - t.plan(1) - - var candidates = [readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFile)) -}) - -test('find accessible - single item array, readable in subdir', function (t) { - t.plan(1) - - var candidates = [readableFileInDir] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFileInDir)) -}) - -test('find accessible - single item array, unreadable', function (t) { - t.plan(1) - - var candidates = ['unreadable_file'] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - multi item array, no matches', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', 'unreadable_file'] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - multi item array, single match', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFile)) -}) - -test('find accessible - multi item array, return first match', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', anotherReadableFile, readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, anotherReadableFile)) +describe('find-accessible-sync', function () { + it('find accessible - empty array', function () { + var candidates = [] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, undefined) + }) + + it('find accessible - single item array, readable', function () { + var candidates = [readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, readableFile)) + }) + + it('find accessible - single item array, readable in subdir', function () { + var candidates = [readableFileInDir] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, readableFileInDir)) + }) + + it('find accessible - single item array, unreadable', function () { + var candidates = ['unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, undefined) + }) + + it('find accessible - multi item array, no matches', 
function () { + var candidates = ['non_existent_file', 'unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, undefined) + }) + + it('find accessible - multi item array, single match', function () { + var candidates = ['non_existent_file', readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, readableFile)) + }) + + it('find accessible - multi item array, return first match', function () { + var candidates = ['non_existent_file', anotherReadableFile, readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, anotherReadableFile)) + }) }) diff --git a/node_modules/node-gyp/test/test-find-node-directory.js b/node_modules/node-gyp/test/test-find-node-directory.js index fa6223c65d03c..ca299f6330646 100644 --- a/node_modules/node-gyp/test/test-find-node-directory.js +++ b/node_modules/node-gyp/test/test-find-node-directory.js @@ -1,119 +1,115 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const findNodeDirectory = require('../lib/find-node-directory') const platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix', 'os400'] -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in a build tree where npm is installed in -// .... /deps/npm -test('test find-node-directory - node install', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - t.equal( - findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), - path.join('/x')) - } -}) +describe('find-node-directory', function () { + // we should find the directory based on the directory + // the script is running in and it should match the layout + // in a build tree where npm is installed in + // .... /deps/npm + it('test find-node-directory - node install', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + assert.strictEqual( + findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), + path.join('/x')) + } + }) -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in an installed tree where npm is installed in -// .... /lib/node_modules/npm or .../node_modules/npm -// depending on the patform -test('test find-node-directory - node build', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - if (platforms[next] === 'win32') { - t.equal( - findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib', - processObj), path.join('/y')) - } else { - t.equal( - findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib', - processObj), path.join('/y')) + // we should find the directory based on the directory + // the script is running in and it should match the layout + // in an installed tree where npm is installed in + // .... 
/lib/node_modules/npm or .../node_modules/npm
+  // depending on the platform
+  it('test find-node-directory - node build', function () {
+    for (var next = 0; next < platforms.length; next++) {
+      var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] }
+      if (platforms[next] === 'win32') {
+        assert.strictEqual(
+          findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib',
+            processObj), path.join('/y'))
+      } else {
+        assert.strictEqual(
+          findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib',
+            processObj), path.join('/y'))
+      }
     }
-  }
-})
+  })
 
-// we should find the directory based on the execPath
-// for node and match because it was in the bin directory
-test('test find-node-directory - node in bin directory', function (t) {
-  t.plan(platforms.length)
-  for (var next = 0; next < platforms.length; next++) {
-    var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] }
-    t.equal(
-      findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj),
-      path.join('/x/y'))
-  }
-})
+  // we should find the directory based on the execPath
+  // for node and match because it was in the bin directory
+  it('test find-node-directory - node in bin directory', function () {
+    for (var next = 0; next < platforms.length; next++) {
+      var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] }
+      assert.strictEqual(
+        findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj),
+        path.join('/x/y'))
+    }
+  })
 
-// we should find the directory based on the execPath
-// for node and match because it was in the Release directory
-test('test find-node-directory - node in build release dir', function (t) {
-  t.plan(platforms.length)
-  for (var next = 0; next < platforms.length; next++) {
-    var processObj
-    if (platforms[next] === 'win32') {
-      processObj = { execPath: '/x/y/Release/node', platform: platforms[next] }
-    } else {
-      processObj = {
-        execPath: '/x/y/out/Release/node',
-        platform: platforms[next]
+  // we should find the directory based on the execPath
+  // for node and match because it was in the Release directory
+  it('test find-node-directory - node in build release dir', function () {
+    for (var next = 0; next < platforms.length; next++) {
+      var processObj
+      if (platforms[next] === 'win32') {
+        processObj = { execPath: '/x/y/Release/node', platform: platforms[next] }
+      } else {
+        processObj = {
+          execPath: '/x/y/out/Release/node',
+          platform: platforms[next]
+        }
       }
+
+      assert.strictEqual(
+        findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj),
+        path.join('/x/y'))
     }
+  })
 
-    t.equal(
-      findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj),
-      path.join('/x/y'))
-  }
-})
+  // we should find the directory based on the execPath
+  // for node and match because it was in the Debug directory
+  it('test find-node-directory - node in Debug release dir', function () {
+    for (var next = 0; next < platforms.length; next++) {
+      var processObj
+      if (platforms[next] === 'win32') {
+        processObj = { execPath: '/a/b/Debug/node', platform: platforms[next] }
+      } else {
+        processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] }
+      }
 
-// we should find the directory based on the execPath
-// for node and match because it was in the Debug directory
-test('test find-node-directory - node in Debug release dir', function (t) {
-  t.plan(platforms.length)
-  for (var next = 0; next < platforms.length; next++) {
-    var processObj
-    if (platforms[next] === 'win32') {
-      processObj = { execPath: 
'/a/b/Debug/node', platform: platforms[next] } - } else { - processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] } + assert.strictEqual( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/a/b')) } + }) - t.equal( - findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), - path.join('/a/b')) - } -}) - -// we should not find it as it will not match based on the execPath nor -// the directory from which the script is running -test('test find-node-directory - not found', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/z/y', platform: next } - t.equal(findNodeDirectory('/a/b/c/d', processObj), '') - } -}) + // we should not find it as it will not match based on the execPath nor + // the directory from which the script is running + it('test find-node-directory - not found', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/z/y', platform: next } + assert.strictEqual(findNodeDirectory('/a/b/c/d', processObj), '') + } + }) -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in a build tree where npm is installed in -// .... /deps/npm -// same test as above but make sure additional directory entries -// don't cause an issue -test('test find-node-directory - node install', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - t.equal( - findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', - processObj), path.join('/x/y/z/a/b/c')) - } + // we should find the directory based on the directory + // the script is running in and it should match the layout + // in a build tree where npm is installed in + // .... 
/deps/npm + // same test as above but make sure additional directory entries + // don't cause an issue + it('test find-node-directory - node install', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + assert.strictEqual( + findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', + processObj), path.join('/x/y/z/a/b/c')) + } + }) }) diff --git a/node_modules/node-gyp/test/test-find-python.js b/node_modules/node-gyp/test/test-find-python.js index 67d0b2664f0b1..592c480f24fef 100644 --- a/node_modules/node-gyp/test/test-find-python.js +++ b/node_modules/node-gyp/test/test-find-python.js @@ -2,225 +2,212 @@ delete process.env.PYTHON -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const findPython = require('../lib/find-python') const execFile = require('child_process').execFile const PythonFinder = findPython.test.PythonFinder require('npmlog').level = 'warn' -test('find python', function (t) { - t.plan(4) - - findPython.test.findPython(null, function (err, found) { - t.strictEqual(err, null) - var proc = execFile(found, ['-V'], function (err, stdout, stderr) { - t.strictEqual(err, null) - t.ok(/Python 3/.test(stdout)) - t.strictEqual(stderr, '') +describe('find-python', function () { + it('find python', function () { + findPython.test.findPython(null, function (err, found) { + assert.strictEqual(err, null) + var proc = execFile(found, ['-V'], function (err, stdout, stderr) { + assert.strictEqual(err, null) + assert.ok(/Python 3/.test(stdout)) + assert.strictEqual(stderr, '') + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') }) -}) -function poison (object, property) { - function fail () { - console.error(Error(`Property ${property} should not have been accessed.`)) - process.abort() - } - var descriptor = { - configurable: false, - enumerable: false, - get: fail, - set: fail - } - Object.defineProperty(object, property, descriptor) -} - -function TestPythonFinder () { - PythonFinder.apply(this, arguments) -} -TestPythonFinder.prototype = Object.create(PythonFinder.prototype) -// Silence npmlog - remove for debugging -TestPythonFinder.prototype.log = { - silly: () => {}, - verbose: () => {}, - info: () => {}, - warn: () => {}, - error: () => {} -} -delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON - -test('find python - python', function (t) { - t.plan(6) - - var f = new TestPythonFinder('python', done) - f.execFile = function (program, args, opts, cb) { - f.execFile = function (program, args, opts, cb) { - poison(f, 'execFile') - t.strictEqual(program, '/path/python') - t.ok(/sys\.version_info/.test(args[1])) - cb(null, '3.9.1') - } - t.strictEqual(program, - process.platform === 'win32' ? 
'"python"' : 'python') - t.ok(/sys\.executable/.test(args[1])) - cb(null, '/path/python') + function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() + } + var descriptor = { + configurable: false, + enumerable: false, + get: fail, + set: fail + } + Object.defineProperty(object, property, descriptor) } - f.findPython() - function done (err, python) { - t.strictEqual(err, null) - t.strictEqual(python, '/path/python') + function TestPythonFinder () { + PythonFinder.apply(this, arguments) } -}) - -test('find python - python too old', function (t) { - t.plan(2) + TestPythonFinder.prototype = Object.create(PythonFinder.prototype) + // Silence npmlog - remove for debugging + TestPythonFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} + } + delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON - var f = new TestPythonFinder(null, done) - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { + it('find python - python', function () { + var f = new TestPythonFinder('python', done) + f.execFile = function (program, args, opts, cb) { + f.execFile = function (program, args, opts, cb) { + poison(f, 'execFile') + assert.strictEqual(program, '/path/python') + assert.ok(/sys\.version_info/.test(args[1])) + cb(null, '3.9.1') + } + assert.strictEqual(program, + process.platform === 'win32' ? '"python"' : 'python') + assert.ok(/sys\.executable/.test(args[1])) cb(null, '/path/python') - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(null, '2.3.4') - } else { - t.fail() } - } - f.findPython() - - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not supported/i.test(f.errorLog)) - } -}) - -test('find python - no python', function (t) { - t.plan(2) + f.findPython() - var f = new TestPythonFinder(null, done) - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(new Error('not a Python executable')) - } else { - t.fail() + function done (err, python) { + assert.strictEqual(err, null) + assert.strictEqual(python, '/path/python') } - } - f.findPython() + }) - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } -}) + it('find python - python too old', function () { + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(null, '/path/python') + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(null, '2.3.4') + } else { + assert.fail() + } + } + f.findPython() -test('find python - no python2, no python, unix', function (t) { - t.plan(2) + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not supported/i.test(f.errorLog)) + } + }) - var f = new TestPythonFinder(null, done) - f.checkPyLauncher = t.fail - f.win = false + it('find python - no python', function () { + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + assert.fail() + } + } + f.findPython() - 
f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else { - t.fail() + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not in PATH/.test(f.errorLog)) } - } - f.findPython() + }) - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } -}) + it('find python - no python2, no python, unix', function () { + var f = new TestPythonFinder(null, done) + f.checkPyLauncher = assert.fail + f.win = false -test('find python - no python, use python launcher', function (t) { - t.plan(4) - - var f = new TestPythonFinder(null, done) - f.win = true - - f.execFile = function (program, args, opts, cb) { - if (program === 'py.exe') { - t.notEqual(args.indexOf('-3'), -1) - t.notEqual(args.indexOf('-c'), -1) - return cb(null, 'Z:\\snake.exe') - } - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (f.winDefaultLocations.includes(program)) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - if (program === 'Z:\\snake.exe') { - cb(null, '3.9.0') + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) } else { - t.fail() + assert.fail() } - } else { - t.fail() } - } - f.findPython() + f.findPython() - function done (err, python) { - t.strictEqual(err, null) - t.strictEqual(python, 'Z:\\snake.exe') - } -}) - -test('find python - no python, no python launcher, good guess', function (t) { - t.plan(2) + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not in PATH/.test(f.errorLog)) + } + }) - var f = new TestPythonFinder(null, done) - f.win = true - const expectedProgram = f.winDefaultLocations[0] + it('find python - no python, use python launcher', function () { + var f = new TestPythonFinder(null, done) + f.win = true - f.execFile = function (program, args, opts, cb) { - if (program === 'py.exe') { - return cb(new Error('not found')) + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + assert.notStrictEqual(args.indexOf('-3'), -1) + assert.notStrictEqual(args.indexOf('-c'), -1) + return cb(null, 'Z:\\snake.exe') + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (f.winDefaultLocations.includes(program)) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + if (program === 'Z:\\snake.exe') { + cb(null, '3.9.0') + } else { + assert.fail() + } + } else { + assert.fail() + } } - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (program === expectedProgram && - /sys\.version_info/.test(args[args.length - 1])) { - cb(null, '3.7.3') - } else { - t.fail() + f.findPython() + + function done (err, python) { + assert.strictEqual(err, null) + assert.strictEqual(python, 'Z:\\snake.exe') } - } - f.findPython() + }) - function done (err, python) { - t.strictEqual(err, null) - t.ok(python === expectedProgram) - } -}) + it('find python - no python, no python launcher, good guess', function () { + var f = new TestPythonFinder(null, done) + f.win = true + const expectedProgram = f.winDefaultLocations[0] + + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + return cb(new Error('not found')) + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new 
Error('not found')) + } else if (program === expectedProgram && + /sys\.version_info/.test(args[args.length - 1])) { + cb(null, '3.7.3') + } else { + assert.fail() + } + } + f.findPython() -test('find python - no python, no python launcher, bad guess', function (t) { - t.plan(2) + function done (err, python) { + assert.strictEqual(err, null) + assert.ok(python === expectedProgram) + } + }) - var f = new TestPythonFinder(null, done) - f.win = true + it('find python - no python, no python launcher, bad guess', function () { + var f = new TestPythonFinder(null, done) + f.win = true - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(new Error('not a Python executable')) - } else { - t.fail() + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + assert.fail() + } } - } - f.findPython() + f.findPython() - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not in PATH/.test(f.errorLog)) + } + }) }) diff --git a/node_modules/node-gyp/test/test-find-visualstudio.js b/node_modules/node-gyp/test/test-find-visualstudio.js index 1327cf8841111..29d9a7dba5f55 100644 --- a/node_modules/node-gyp/test/test-find-visualstudio.js +++ b/node_modules/node-gyp/test/test-find-visualstudio.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const fs = require('fs') const path = require('path') const findVisualStudio = require('../lib/find-visualstudio') @@ -35,642 +36,635 @@ TestVisualStudioFinder.prototype.log = { error: () => {} } -test('VS2013', function (t) { - t.plan(4) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\MSBuild12\\MSBuild.exe', - path: 'C:\\VS2013', - sdk: null, - toolset: 'v120', - version: '12.0', - versionMajor: 12, - versionMinor: 0, - versionYear: 2013 +describe('find-visualstudio', function () { + it('VS2013', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\MSBuild12\\MSBuild.exe', + path: 'C:\\VS2013', + sdk: null, + toolset: 'v120', + version: '12.0', + versionMajor: 12, + versionMinor: 0, + versionYear: 2013 + }) }) - }) - finder.findVisualStudio2017OrNewer = (cb) => { - finder.parseData(new Error(), '', '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - continue - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\VS2013\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\MSBuild12\\') - default: - 
t.fail(`unexpected search for registry value ${fullName}`) - } + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) } - return cb(new Error()) - } - finder.findVisualStudio() -}) - -test('VS2013 should not be found on new node versions', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder({ - major: 10, - minor: 0, - patch: 0 - }, null, (err, info) => { - t.ok(/find .* Visual Studio/i.test(err), 'expect error') - t.false(info, 'no data') - }) - - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - continue - default: - t.fail(`unexpected search for registry value ${fullName}`) + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild12\\') + default: + assert.fail(`unexpected search for registry value ${fullName}`) + } } + return cb(new Error()) } - return cb(new Error()) - } - finder.findVisualStudio() -}) + finder.findVisualStudio() + }) -test('VS2015', function (t) { - t.plan(4) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\MSBuild14\\MSBuild.exe', - path: 'C:\\VS2015', - sdk: null, - toolset: 'v140', - version: '14.0', - versionMajor: 14, - versionMinor: 0, - versionYear: 2015 + it('VS2013 should not be found on new node versions', function () { + const finder = new TestVisualStudioFinder({ + major: 10, + minor: 0, + patch: 0 + }, null, (err, info) => { + assert.ok(/find .* Visual Studio/i.test(err), 'expect error') + assert.ok(!info, 'no data') }) - }) - finder.findVisualStudio2017OrNewer = (cb) => { - finder.parseData(new Error(), '', '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\VS2015\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\MSBuild14\\') - default: - t.fail(`unexpected search for registry value ${fullName}`) + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for 
(var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + default: + assert.fail(`unexpected search for registry value ${fullName}`) + } } + return cb(new Error()) } - return cb(new Error()) - } - finder.findVisualStudio() -}) - -test('error from PowerShell', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(new Error(), '', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.findVisualStudio() }) -}) - -test('empty output from PowerShell', function (t) { - t.plan(2) - const finder = new TestVisualStudioFinder(semverV1, null, null) + it('VS2015', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\MSBuild14\\MSBuild.exe', + path: 'C:\\VS2015', + sdk: null, + toolset: 'v140', + version: '14.0', + versionMajor: 14, + versionMinor: 0, + versionYear: 2015 + }) + }) - finder.parseData(null, '', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild14\\') + default: + assert.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() }) -}) - -test('output from PowerShell not JSON', function (t) { - t.plan(2) - const finder = new TestVisualStudioFinder(semverV1, null, null) + it('error from PowerShell', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - finder.parseData(null, 'AAAABBBB', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.parseData(new Error(), '', '', (info) => { + assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + assert.ok(!info, 'no data') + }) }) -}) -test('wrong JSON from PowerShell', function (t) { - t.plan(2) + it('empty output from PowerShell', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(null, '{}', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.parseData(null, '', '', (info) => { + assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + assert.ok(!info, 'no data') + }) }) -}) - -test('empty JSON from PowerShell', function (t) { - t.plan(2) - const finder = new TestVisualStudioFinder(semverV1, null, null) + it('output from PowerShell not JSON', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - finder.parseData(null, '[]', '', (info) => { - t.ok(/find .* 
Visual Studio/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.parseData(null, 'AAAABBBB', '', (info) => { + assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + assert.ok(!info, 'no data') + }) }) -}) - -test('future version', function (t) { - t.plan(3) - const finder = new TestVisualStudioFinder(semverV1, null, null) + it('wrong JSON from PowerShell', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - finder.parseData(null, JSON.stringify([{ - packages: [ - 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64', - 'Microsoft.VisualStudio.Component.Windows10SDK.17763', - 'Microsoft.VisualStudio.VC.MSBuild.Base' - ], - path: 'C:\\VS', - version: '9999.9999.9999.9999' - }]), '', (info) => { - t.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error') - t.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error') - t.false(info, 'no data') + finder.parseData(null, '{}', '', (info) => { + assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + assert.ok(!info, 'no data') + }) }) -}) -test('single unusable VS2017', function (t) { - t.plan(3) + it('empty JSON from PowerShell', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - const finder = new TestVisualStudioFinder(semverV1, null, null) - - const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', (info) => { - t.ok(/checking/i.test(finder.errorLog[0]), 'expect error') - t.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error') - t.false(info, 'no data') + finder.parseData(null, '[]', '', (info) => { + assert.ok(/find .* Visual Studio/i.test(finder.errorLog[0]), 'expect error') + assert.ok(!info, 'no data') + }) }) -}) -test('minimal VS2017 Build Tools', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + - 'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools', - sdk: '10.0.17134.0', - toolset: 'v141', - version: '15.9.28307.665', - versionMajor: 15, - versionMinor: 9, - versionYear: 2017 + it('future version', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, JSON.stringify([{ + packages: [ + 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64', + 'Microsoft.VisualStudio.Component.Windows10SDK.17763', + 'Microsoft.VisualStudio.VC.MSBuild.Base' + ], + path: 'C:\\VS', + version: '9999.9999.9999.9999' + }]), '', (info) => { + assert.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error') + assert.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error') + assert.ok(!info, 'no data') }) }) - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2017_BuildTools_minimal.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) + it('single unusable VS2017', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) -test('VS2017 Community with C++ workload', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program 
Files (x86)\\Microsoft Visual Studio\\2017\\' + - 'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', - sdk: '10.0.17763.0', - toolset: 'v141', - version: '15.9.28307.665', - versionMajor: 15, - versionMinor: 9, - versionYear: 2017 + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', (info) => { + assert.ok(/checking/i.test(finder.errorLog[0]), 'expect error') + assert.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error') + assert.ok(!info, 'no data') }) }) - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2017_Community_workload.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) - -test('VS2017 Express', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + - 'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress', - sdk: '10.0.17763.0', - toolset: 'v141', - version: '15.9.28307.858', - versionMajor: 15, - versionMinor: 9, - versionYear: 2017 + it('minimal VS2017 Build Tools', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) }) - }) - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() + }) -test('VS2019 Preview with C++ workload', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + - 'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview', - sdk: '10.0.17763.0', - toolset: 'v142', - version: '16.0.28608.199', - versionMajor: 16, - versionMinor: 0, - versionYear: 2019 + it('VS2017 Community with C++ workload', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + 
versionYear: 2017 + }) }) - }) - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2019_Preview.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() + }) -test('minimal VS2019 Build Tools', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + - 'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', - sdk: '10.0.17134.0', - toolset: 'v142', - version: '16.1.28922.388', - versionMajor: 16, - versionMinor: 1, - versionYear: 2019 + it('VS2017 Express', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.858', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) }) - }) - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2019_BuildTools_minimal.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() + }) -test('VS2019 Community with C++ workload', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + - 'Community\\MSBuild\\Current\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community', - sdk: '10.0.17763.0', - toolset: 'v142', - version: '16.1.28922.388', - versionMajor: 16, - versionMinor: 1, - versionYear: 2019 + it('VS2019 Preview with C++ workload', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.0.28608.199', + versionMajor: 16, + versionMinor: 0, + versionYear: 2019 + }) }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() }) - poison(finder, 'regSearchKeys') - 
finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2019_Community_workload.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) + it('minimal VS2019 Build Tools', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) -function allVsVersions (t, finder) { - finder.findVisualStudio2017OrNewer = (cb) => { - const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_Unusable.txt'))) - const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_BuildTools_minimal.txt'))) - const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_Community_workload.txt'))) - const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_Express.txt'))) - const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2019_Preview.txt'))) - const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2019_BuildTools_minimal.txt'))) - const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2019_Community_workload.txt'))) - const data = JSON.stringify(data0.concat(data1, data2, data3, data4, - data5, data6)) - finder.parseData(null, data, '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': - continue - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': - return cb(null, 'C:\\VS2013\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': - return cb(null, 'C:\\MSBuild12\\') - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - return cb(null, 'C:\\VS2015\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': - return cb(null, 'C:\\MSBuild14\\') - default: - t.fail(`unexpected search for registry value ${fullName}`) - } + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) } - return cb(new Error()) - } -} + finder.findVisualStudio() + }) -test('fail when looking for invalid path', function (t) { - t.plan(2) + it('VS2019 Community with C++ workload', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Community\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) - const 
finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => { - t.ok(/find .* Visual Studio/i.test(err), 'expect error') - t.false(info, 'no data') + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2013 by version number', function (t) { - t.plan(2) + it('VS2022 Preview with C++ workload', function () { + const msBuildPath = process.arch === 'arm64' + ? 'C:\\Program Files\\Microsoft Visual Studio\\2022\\' + + 'Community\\MSBuild\\Current\\Bin\\arm64\\MSBuild.exe' + : 'C:\\Program Files\\Microsoft Visual Studio\\2022\\' + + 'Community\\MSBuild\\Current\\Bin\\MSBuild.exe' + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: msBuildPath, + path: + 'C:\\Program Files\\Microsoft Visual Studio\\2022\\Community', + sdk: '10.0.22621.0', + toolset: 'v143', + version: '17.4.33213.308', + versionMajor: 17, + versionMinor: 4, + versionYear: 2022 + }) + }) - const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2013) + poison(finder, 'regSearchKeys') + finder.msBuildPathExists = (path) => { + return true + } + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2022_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2013 by installation path', function (t) { - t.plan(2) + function allVsVersions (finder) { + finder.findVisualStudio2017OrNewer = (cb) => { + const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Unusable.txt'))) + const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt'))) + const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt'))) + const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Express.txt'))) + const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt'))) + const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt'))) + const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt'))) + const data7 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2022_Community_workload.txt'))) + const data = JSON.stringify(data0.concat(data1, data2, data3, data4, + data5, data6, data7)) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + continue + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild12\\') + case 
'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild14\\') + default: + assert.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + } - const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, 'C:\\VS2013') + it('fail when looking for invalid path', function () { + const finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => { + assert.ok(/find .* Visual Studio/i.test(err), 'expect error') + assert.ok(!info, 'no data') }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) + allVsVersions(finder) + finder.findVisualStudio() + }) -test('look for VS2015 by version number', function (t) { - t.plan(2) + it('look for VS2013 by version number', function () { + const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.versionYear, 2013) + }) - const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2015) + allVsVersions(finder) + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) + it('look for VS2013 by installation path', function () { + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013', + (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, 'C:\\VS2013') + }) -test('look for VS2015 by installation path', function (t) { - t.plan(2) + allVsVersions(finder) + finder.findVisualStudio() + }) - const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, 'C:\\VS2015') + it('look for VS2015 by version number', function () { + const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.versionYear, 2015) }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2017 by version number', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2017) + allVsVersions(finder) + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) + it('look for VS2015 by installation path', function () { + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, 'C:\\VS2015') + }) -test('look for VS2017 by installation path', function (t) { - t.plan(2) + allVsVersions(finder) + finder.findVisualStudio() + }) - const finder = new TestVisualStudioFinder(semverV1, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community') + it('look for VS2017 by version number', function () { + const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.versionYear, 2017) }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2019 by version number', function (t) { - t.plan(2) - - const finder = new 
TestVisualStudioFinder(semverV1, '2019', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2019) + allVsVersions(finder) + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2019 by installation path', function (t) { - t.plan(2) + it('look for VS2017 by installation path', function () { + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community') + }) + + allVsVersions(finder) + finder.findVisualStudio() + }) - const finder = new TestVisualStudioFinder(semverV1, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + it('look for VS2019 by version number', function () { + const finder = new TestVisualStudioFinder(semverV1, '2019', (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.versionYear, 2019) }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) + allVsVersions(finder) + finder.findVisualStudio() + }) -test('msvs_version match should be case insensitive', function (t) { - t.plan(2) + it('look for VS2019 by installation path', function () { + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(finder) + finder.findVisualStudio() + }) - const finder = new TestVisualStudioFinder(semverV1, - 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + it('look for VS2022 by version number', function () { + const finder = new TestVisualStudioFinder(semverV1, '2022', (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.versionYear, 2022) }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('latest version should be found by default', function (t) { - t.plan(2) + finder.msBuildPathExists = (path) => { + return true + } - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2019) + allVsVersions(finder) + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) + it('msvs_version match should be case insensitive', function () { + const finder = new TestVisualStudioFinder(semverV1, + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS', + (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(finder) + finder.findVisualStudio() + }) -test('run on a usable VS Command Prompt', function (t) { - t.plan(2) + it('latest version should be found by default', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.versionYear, 2022) + }) - process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' - // VSINSTALLDIR is not defined on 
Visual C++ Build Tools 2015 - delete process.env.VSINSTALLDIR + finder.msBuildPathExists = (path) => { + return true + } - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, 'C:\\VS2015') + allVsVersions(finder) + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) + it('run on a usable VS Command Prompt', function () { + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + // VSINSTALLDIR is not defined on Visual C++ Build Tools 2015 + delete process.env.VSINSTALLDIR -test('VCINSTALLDIR match should be case insensitive', function (t) { - t.plan(2) - - process.env.VCINSTALLDIR = - 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC' + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, 'C:\\VS2015') + }) - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + allVsVersions(finder) + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('run on a unusable VS Command Prompt', function (t) { - t.plan(2) + it('VCINSTALLDIR match should be case insensitive', function () { + process.env.VCINSTALLDIR = + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC' - process.env.VCINSTALLDIR = - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC' + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.ok(/find .* Visual Studio/i.test(err), 'expect error') - t.false(info, 'no data') + allVsVersions(finder) + finder.findVisualStudio() }) - allVsVersions(t, finder) - finder.findVisualStudio() -}) + it('run on a unusable VS Command Prompt', function () { + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC' -test('run on a VS Command Prompt with matching msvs_version', function (t) { - t.plan(2) + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.ok(/find .* Visual Studio/i.test(err), 'expect error') + assert.ok(!info, 'no data') + }) - process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + allVsVersions(finder) + finder.findVisualStudio() + }) - const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, 'C:\\VS2015') - }) + it('run on a VS Command Prompt with matching msvs_version', function () { + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' - allVsVersions(t, finder) - finder.findVisualStudio() -}) + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info.path, 'C:\\VS2015') + }) -test('run on a VS Command Prompt with mismatched msvs_version', function (t) { - t.plan(2) + allVsVersions(finder) + finder.findVisualStudio() + }) - process.env.VCINSTALLDIR = - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC' + it('run on a VS Command Prompt with mismatched msvs_version', function () { + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft 
Visual Studio\\2019\\BuildTools\\VC' - const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', - (err, info) => { - t.ok(/find .* Visual Studio/i.test(err), 'expect error') - t.false(info, 'no data') - }) + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + assert.ok(/find .* Visual Studio/i.test(err), 'expect error') + assert.ok(!info, 'no data') + }) - allVsVersions(t, finder) - finder.findVisualStudio() + allVsVersions(finder) + finder.findVisualStudio() + }) }) diff --git a/node_modules/node-gyp/test/test-install.js b/node_modules/node-gyp/test/test-install.js index 5039dc992eb47..235acf5231396 100644 --- a/node_modules/node-gyp/test/test-install.js +++ b/node_modules/node-gyp/test/test-install.js @@ -1,46 +1,137 @@ 'use strict' -const { test } = require('tap') -const { test: { install } } = require('../lib/install') +const { describe, it, after } = require('mocha') +const assert = require('assert') +const path = require('path') +const os = require('os') +const util = require('util') +const { test: { download, install } } = require('../lib/install') +const rimraf = require('rimraf') +const gyp = require('../lib/node-gyp') const log = require('npmlog') +const semver = require('semver') +const stream = require('stream') +const streamPipeline = util.promisify(stream.pipeline) log.level = 'error' // we expect a warning -test('EACCES retry once', async (t) => { - t.plan(3) - - const fs = { - promises: { - stat (_) { - const err = new Error() - err.code = 'EACCES' - t.ok(true) - throw err +describe('install', function () { + it('EACCES retry once', async () => { + const fs = { + promises: { + stat (_) { + const err = new Error() + err.code = 'EACCES' + assert.ok(true) + throw err + } } } - } - const Gyp = { - devDir: __dirname, - opts: { - ensure: true - }, - commands: { - install (argv, cb) { - install(fs, Gyp, argv).then(cb, cb) + const Gyp = { + devDir: __dirname, + opts: { + ensure: true }, - remove (_, cb) { - cb() + commands: { + install (argv, cb) { + install(fs, Gyp, argv).then(cb, cb) + }, + remove (_, cb) { + cb() + } } } - } - try { - await install(fs, Gyp, []) - } catch (err) { - t.ok(true) - if (/"pre" versions of node cannot be installed/.test(err.message)) { - t.ok(true) + try { + await install(fs, Gyp, []) + } catch (err) { + assert.ok(true) + if (/"pre" versions of node cannot be installed/.test(err.message)) { + assert.ok(true) + } + } + }) + + // only run these tests if we are running a version of Node with predictable version path behavior + const skipParallelInstallTests = process.env.FAST_TEST || + process.release.name !== 'node' || + semver.prerelease(process.version) !== null || + semver.satisfies(process.version, '<10') + + async function parallelInstallsTest (test, fs, devDir, prog) { + if (skipParallelInstallTests) { + return test.skip('Skipping parallel installs test due to test environment configuration') } + + after(async () => { + await util.promisify(rimraf)(devDir) + }) + + const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) + await util.promisify(rimraf)(expectedDir) + + await Promise.all([ + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []), + install(fs, prog, []) + ]) } + + it('parallel installs (ensure=true)', async function () { + this.timeout(600000) + + const fs = require('graceful-fs') + const devDir = await 
util.promisify(fs.mkdtemp)(path.join(os.tmpdir(), 'node-gyp-test-')) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + prog.opts.ensure = true + log.level = 'warn' + + await parallelInstallsTest(this, fs, devDir, prog) + }) + + it('parallel installs (ensure=false)', async function () { + this.timeout(600000) + + const fs = require('graceful-fs') + const devDir = await util.promisify(fs.mkdtemp)(path.join(os.tmpdir(), 'node-gyp-test-')) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + prog.opts.ensure = false + log.level = 'warn' + + await parallelInstallsTest(this, fs, devDir, prog) + }) + + it('parallel installs (tarball)', async function () { + this.timeout(600000) + + const fs = require('graceful-fs') + const devDir = await util.promisify(fs.mkdtemp)(path.join(os.tmpdir(), 'node-gyp-test-')) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + prog.opts.tarball = path.join(devDir, 'node-headers.tar.gz') + log.level = 'warn' + + await streamPipeline( + (await download(prog, `https://nodejs.org/dist/${process.version}/node-${process.version}.tar.gz`)).body, + fs.createWriteStream(prog.opts.tarball) + ) + + await parallelInstallsTest(this, fs, devDir, prog) + }) }) diff --git a/node_modules/node-gyp/test/test-options.js b/node_modules/node-gyp/test/test-options.js index 8a634f0e0955d..24e79c80a1843 100644 --- a/node_modules/node-gyp/test/test-options.js +++ b/node_modules/node-gyp/test/test-options.js @@ -1,42 +1,41 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const gyp = require('../lib/node-gyp') -test('options in environment', (t) => { - t.plan(1) +describe('options', function () { + it('options in environment', () => { + // `npm test` dumps a ton of npm_config_* variables in the environment. + Object.keys(process.env) + .filter((key) => /^npm_config_/.test(key)) + .forEach((key) => { delete process.env[key] }) - // `npm test` dumps a ton of npm_config_* variables in the environment. - Object.keys(process.env) - .filter((key) => /^npm_config_/.test(key)) - .forEach((key) => { delete process.env[key] }) + // in some platforms, certain keys are stubborn and cannot be removed + const keys = Object.keys(process.env) + .filter((key) => /^npm_config_/.test(key)) + .map((key) => key.substring('npm_config_'.length)) + .concat('argv', 'x') - // in some platforms, certain keys are stubborn and cannot be removed - const keys = Object.keys(process.env) - .filter((key) => /^npm_config_/.test(key)) - .map((key) => key.substring('npm_config_'.length)) - .concat('argv', 'x') + // Zero-length keys should get filtered out. + process.env.npm_config_ = '42' + // Other keys should get added. + process.env.npm_config_x = '42' + // Except loglevel. + process.env.npm_config_loglevel = 'debug' - // Zero-length keys should get filtered out. - process.env.npm_config_ = '42' - // Other keys should get added. - process.env.npm_config_x = '42' - // Except loglevel. - process.env.npm_config_loglevel = 'debug' + const g = gyp() + g.parseArgv(['rebuild']) // Also sets opts.argv. - const g = gyp() - g.parseArgv(['rebuild']) // Also sets opts.argv. 
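// A note on the pattern, since every test file in this patch gets the same
// treatment: tap tests become mocha `it()` blocks inside one file-level
// `describe()`, with node's built-in `assert` replacing tap's assertions.
// A sketch of the mapping (assuming no tap-specific plumbing remains):
//
//   const { test } = require('tap')     ->  const { describe, it } = require('mocha')
//                                           const assert = require('assert')
//   test('name', function (t) { ... })  ->  it('name', function () { ... })
//   t.plan(n)                           ->  dropped (mocha has no plan counter)
//   t.equal / t.strictEqual(a, b)       ->  assert.strictEqual(a, b)
//   t.deepEqual(a, b)                   ->  assert.deepStrictEqual(a, b)
//   t.ok(v) / t.false(v)                ->  assert.ok(v) / assert.ok(!v)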
+ assert.deepStrictEqual(Object.keys(g.opts).sort(), keys.sort()) + }) - t.deepEqual(Object.keys(g.opts).sort(), keys.sort()) -}) - -test('options with spaces in environment', (t) => { - t.plan(1) - - process.env.npm_config_force_process_config = 'true' + it('options with spaces in environment', () => { + process.env.npm_config_force_process_config = 'true' - const g = gyp() - g.parseArgv(['rebuild']) // Also sets opts.argv. + const g = gyp() + g.parseArgv(['rebuild']) // Also sets opts.argv. - t.equal(g.opts['force-process-config'], 'true') + assert.strictEqual(g.opts['force-process-config'], 'true') + }) }) diff --git a/node_modules/node-gyp/test/test-process-release.js b/node_modules/node-gyp/test/test-process-release.js index c3ee0703c532f..0f40666473028 100644 --- a/node_modules/node-gyp/test/test-process-release.js +++ b/node_modules/node-gyp/test/test-process-release.js @@ -1,434 +1,401 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const processRelease = require('../lib/process-release') -test('test process release - process.version = 0.8.20', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.8.20', null) - - t.equal(release.semver.version, '0.8.20') - delete release.semver - - t.deepEqual(release, { - version: '0.8.20', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.8.20/', - tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt', - versionDir: '0.8.20', - ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' } +describe('process-release', function () { + it('test process release - process.version = 0.8.20', function () { + var release = processRelease([], { opts: {} }, 'v0.8.20', null) + + assert.strictEqual(release.semver.version, '0.8.20') + delete release.semver + + assert.deepStrictEqual(release, { + version: '0.8.20', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.8.20/', + tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt', + versionDir: '0.8.20', + ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' } + }) }) -}) -test('test process release - process.version = 0.10.21', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.10.21', null) - - t.equal(release.semver.version, '0.10.21') - delete release.semver - - t.deepEqual(release, { - version: '0.10.21', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.21/', - tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt', - versionDir: '0.10.21', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' } + it('test process release - process.version = 0.10.21', function () { + var release = 
processRelease([], { opts: {} }, 'v0.10.21', null) + + assert.strictEqual(release.semver.version, '0.10.21') + delete release.semver + + assert.deepStrictEqual(release, { + version: '0.10.21', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.21/', + tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt', + versionDir: '0.10.21', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' } + }) }) -}) -// prior to -headers.tar.gz -test('test process release - process.version = 0.12.9', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.12.9', null) - - t.equal(release.semver.version, '0.12.9') - delete release.semver - - t.deepEqual(release, { - version: '0.12.9', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.12.9/', - tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt', - versionDir: '0.12.9', - ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' } + // prior to -headers.tar.gz + it('test process release - process.version = 0.12.9', function () { + var release = processRelease([], { opts: {} }, 'v0.12.9', null) + + assert.strictEqual(release.semver.version, '0.12.9') + delete release.semver + + assert.deepStrictEqual(release, { + version: '0.12.9', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.12.9/', + tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt', + versionDir: '0.12.9', + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' } + }) }) -}) -// prior to -headers.tar.gz -test('test process release - process.version = 0.10.41', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.10.41', null) - - t.equal(release.semver.version, '0.10.41') - delete release.semver - - t.deepEqual(release, { - version: '0.10.41', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.41/', - tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt', - versionDir: '0.10.41', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' } + // prior to -headers.tar.gz + it('test process release - process.version = 0.10.41', function () { + var release = processRelease([], { opts: {} }, 'v0.10.41', null) + + assert.strictEqual(release.semver.version, '0.10.41') + delete release.semver + + assert.deepStrictEqual(release, { + version: '0.10.41', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.41/', + tarballUrl: 
'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt', + versionDir: '0.10.41', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' } + }) }) -}) -// has -headers.tar.gz -test('test process release - process.release ~ node@0.10.42', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.10.42', null) - - t.equal(release.semver.version, '0.10.42') - delete release.semver - - t.deepEqual(release, { - version: '0.10.42', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.42/', - tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt', - versionDir: '0.10.42', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' } + // has -headers.tar.gz + it('test process release - process.release ~ node@0.10.42', function () { + var release = processRelease([], { opts: {} }, 'v0.10.42', null) + + assert.strictEqual(release.semver.version, '0.10.42') + delete release.semver + + assert.deepStrictEqual(release, { + version: '0.10.42', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.42/', + tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt', + versionDir: '0.10.42', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' } + }) }) -}) -// has -headers.tar.gz -test('test process release - process.release ~ node@0.12.10', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.12.10', null) - - t.equal(release.semver.version, '0.12.10') - delete release.semver - - t.deepEqual(release, { - version: '0.12.10', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.12.10/', - tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt', - versionDir: '0.12.10', - ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' } + // has -headers.tar.gz + it('test process release - process.release ~ node@0.12.10', function () { + var release = processRelease([], { opts: {} }, 'v0.12.10', null) + + assert.strictEqual(release.semver.version, '0.12.10') + delete release.semver + + assert.deepStrictEqual(release, { + version: '0.12.10', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.12.10/', + tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt', + versionDir: '0.12.10', + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 
'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' } + }) }) -}) -test('test process release - process.release ~ node@4.1.23', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + it('test process release - process.release ~ node@4.1.23', function () { + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.1.23') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v4.1.23/', + tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v4.1.23/', - tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + it('test process release - process.release ~ node@4.1.23 / corp build', function () { + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.1.23') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://some.custom.location/', + tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://some.custom.location/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) -}) -test('test process release - process.release ~ node@4.1.23 / corp build', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz' + it('test process release - process.release ~ node@12.8.0 Windows', function () { + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 
'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib' + }) + + assert.strictEqual(release.semver.version, '12.8.0') + delete release.semver + + assert.deepStrictEqual(release, { + version: '12.8.0', + name: 'node', + baseUrl: 'https://nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'https://some.custom.location/', - tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://some.custom.location/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + it('test process release - process.release ~ node@12.8.0 Windows ARM64', function () { + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib' + }) + + assert.strictEqual(release.semver.version, '12.8.0') + delete release.semver + + assert.deepStrictEqual(release, { + version: '12.8.0', + name: 'node', + baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) -}) -test('test process release - process.release ~ node@12.8.0 Windows', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v12.8.0', { - name: 'node', - sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', - headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib' - }) - - t.equal(release.semver.version, '12.8.0') - delete release.semver - - t.deepEqual(release, { - version: '12.8.0', - name: 'node', - baseUrl: 'https://nodejs.org/download/release/v12.8.0/', - tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - shasumsUrl: 
'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt', - versionDir: '12.8.0', - ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + it('test process release - process.release ~ node@4.1.23 --target=0.10.40', function () { + var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '0.10.40') + delete release.semver + + assert.deepStrictEqual(release, { + version: '0.10.40', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.40/', + tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt', + versionDir: '0.10.40', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' } + }) }) -}) -test('test process release - process.release ~ node@12.8.0 Windows ARM64', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v12.8.0', { - name: 'node', - sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', - headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib' + it('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function () { + var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.1.23') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://foo.bar/baz/v4.1.23/', + tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) - t.equal(release.semver.version, '12.8.0') - delete release.semver - - t.deepEqual(release, { - version: '12.8.0', - name: 'node', - baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/', - tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt', - versionDir: '12.8.0', - ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 
'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + it('test process release - process.release ~ frankenstein@4.1.23', function () { + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'frankenstein', + headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.1.23') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.1.23', + name: 'frankenstein', + baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/', + tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt', + versionDir: 'frankenstein-4.1.23', + ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, + x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } + }) }) -}) -test('test process release - process.release ~ node@4.1.23 --target=0.10.40', function (t) { - t.plan(2) - - var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + it('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function () { + var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', { + name: 'frankenstein', + headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.1.23') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.1.23', + name: 'frankenstein', + baseUrl: 'http://foo.bar/baz/v4.1.23/', + tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', + shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt', + versionDir: 'frankenstein-4.1.23', + ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, + x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } + }) }) - t.equal(release.semver.version, '0.10.40') - delete release.semver - - t.deepEqual(release, { - version: '0.10.40', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.40/', - tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt', - versionDir: '0.10.40', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' } + it('test process release - process.release ~ node@4.0.0-rc.4', function () { + var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.0.0-rc.4') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.0.0-rc.4', + 
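// (The expected objects in this file encode processRelease's URL scheme: for
// releases that ship process.release, baseUrl is headersUrl with the file name
// stripped (here the /download/rc/ channel), shasumsUrl is baseUrl plus
// SHASUMS256.txt, and the win-x86 / win-x64 / win-arm64 node.lib entries hang
// off baseUrl. The 0.8-0.12 cases near the top of the file predate the win-*
// lib directories, which is why they expect the bare node.lib / x64/node.lib
// layout instead.)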
name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) -}) - -test('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function (t) { - t.plan(2) - - var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'https://foo.bar/baz/v4.1.23/', - tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ frankenstein@4.1.23', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'frankenstein', - headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'frankenstein', - baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/', - tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt', - versionDir: 'frankenstein-4.1.23', - ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, - x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, - arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } - }) -}) - -test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function (t) { - t.plan(2) - - var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', { - name: 'frankenstein', - headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'frankenstein', - baseUrl: 'http://foo.bar/baz/v4.1.23/', - tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', - shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt', - versionDir: 'frankenstein-4.1.23', - ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, - x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 
'win-x64/frankenstein.lib' }, - arm64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } - }) -}) - -test('test process release - process.release ~ node@4.0.0-rc.4', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', { - name: 'node', - headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.0.0-rc.4') - delete release.semver - - t.deepEqual(release, { - version: '4.0.0-rc.4', - name: 'node', - baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', - tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', - versionDir: '4.0.0-rc.4', - ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function (t) { - t.plan(2) + it('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function () { // note the missing 'v' on the arg, it should normalise when checking - // whether we're on the default or not - var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', { - name: 'node', - headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.0.0-rc.4') - delete release.semver - - t.deepEqual(release, { - version: '4.0.0-rc.4', - name: 'node', - baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', - tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', - versionDir: '4.0.0-rc.4', - ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + // whether we're on the default or not + var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.0.0-rc.4') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) -}) - -test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function (t) { - t.plan(2) + 
it('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function () { // additional arguments can be passed in on the commandline that should be ignored if they - // are not specifying a valid version @ position 0 - var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', { - name: 'node', - headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + // are not specifying a valid version @ position 0 + var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.0.0-rc.4') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) }) - t.equal(release.semver.version, '4.0.0-rc.4') - delete release.semver - - t.deepEqual(release, { - version: '4.0.0-rc.4', - name: 'node', - baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', - tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', - versionDir: '4.0.0-rc.4', - ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + it('test process release - NODEJS_ORG_MIRROR', function () { + process.env.NODEJS_ORG_MIRROR = 'http://foo.bar' + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + assert.strictEqual(release.semver.version, '4.1.23') + delete release.semver + + assert.deepStrictEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'http://foo.bar/v4.1.23/', + tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) + + delete process.env.NODEJS_ORG_MIRROR }) }) - -test('test process release - NODEJS_ORG_MIRROR', function (t) { - t.plan(2) - - process.env.NODEJS_ORG_MIRROR = 'http://foo.bar' - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', 
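// (NODEJS_ORG_MIRROR swaps the default https://nodejs.org/dist prefix for the
// mirror, so every URL in this expectation is rebuilt under
// http://foo.bar/v4.1.23/; the test deletes the variable when it finishes so
// the override cannot leak into later tests.)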
- name: 'node', - baseUrl: 'http://foo.bar/v4.1.23/', - tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) - - delete process.env.NODEJS_ORG_MIRROR -}) diff --git a/node_modules/node-gyp/update-gyp.py b/node_modules/node-gyp/update-gyp.py index 19524bd6a7ad1..70e2d100288ec 100755 --- a/node_modules/node-gyp/update-gyp.py +++ b/node_modules/node-gyp/update-gyp.py @@ -49,7 +49,7 @@ def safe_extract(tar, path=".", members=None, *, numeric_owner=False): if not is_within_directory(path, member_path): raise Exception("Attempted Path Traversal in Tar File") - tar.extractall(path, members, numeric_owner) + tar.extractall(path, members, numeric_owner=numeric_owner) safe_extract(tar_ref, unzip_target) diff --git a/package-lock.json b/package-lock.json index be6547962b5fa..cd11ee1e959d5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -123,7 +123,7 @@ "minipass": "^5.0.0", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", - "node-gyp": "^9.3.1", + "node-gyp": "^9.4.0", "nopt": "^7.1.0", "npm-audit-report": "^5.0.0", "npm-install-checks": "^6.1.1", @@ -1184,12 +1184,6 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/@gar/promisify": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", - "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==", - "inBundle": true - }, "node_modules/@google-automations/git-file-utils": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/@google-automations/git-file-utils/-/git-file-utils-1.2.6.tgz", @@ -2413,20 +2407,6 @@ "resolved": "mock-registry", "link": true }, - "node_modules/@npmcli/move-file": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-2.0.1.tgz", - "integrity": "sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==", - "deprecated": "This functionality has been moved to @npmcli/fs", - "inBundle": true, - "dependencies": { - "mkdirp": "^1.0.4", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/@npmcli/name-from-folder": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-2.0.0.tgz", @@ -5417,6 +5397,12 @@ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "dev": true }, + "node_modules/exponential-backoff": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.1.tgz", + "integrity": "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==", + "inBundle": true + }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -6546,12 +6532,6 @@ "node": ">=8" } }, - "node_modules/infer-owner": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", - "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", - "inBundle": true - }, 
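An aside on the update-gyp.py hunk above: Python's TarFile.extractall() takes numeric_owner as a keyword-only argument, so the old positional call would raise a TypeError; the surrounding safe_extract() wrapper is the standard guard against path-traversal member names in an untrusted tarball. The same containment check translated to JavaScript, purely as an illustrative sketch (these helper names are hypothetical, not npm or node-gyp APIs):

const path = require('path')

// True when `target` resolves to a location at or below `root`.
const isWithinDirectory = (root, target) => {
  const rel = path.relative(path.resolve(root), path.resolve(target))
  return rel === '' || (!rel.startsWith('..') && !path.isAbsolute(rel))
}

// Throw before extracting if any member would escape the target directory,
// mirroring update-gyp.py's per-member check ahead of extractall().
const assertSafeMembers = (root, memberNames) => {
  for (const name of memberNames) {
    if (!isWithinDirectory(root, path.join(root, name))) {
      throw new Error(`Attempted Path Traversal in Tar File: ${name}`)
    }
  }
}

For example, assertSafeMembers('/tmp/gyp', ['pylib/gyp/input.py']) passes, while a member named '../../etc/crontab' throws.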
"node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -9227,15 +9207,16 @@ } }, "node_modules/node-gyp": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-9.3.1.tgz", - "integrity": "sha512-4Q16ZCqq3g8awk6UplT7AuxQ35XN4R/yf/+wSAwcBUAjg7l58RTactWaP8fIDTi0FzI7YcVLujwExakZlfWkXg==", + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-9.4.0.tgz", + "integrity": "sha512-dMXsYP6gc9rRbejLXmTbVRYjAHw7ppswsKyMxuxJxxOHzluIO1rGp9TOQgjFJ+2MCqcOcQTOPB/8Xwhr+7s4Eg==", "inBundle": true, "dependencies": { "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", "glob": "^7.1.4", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^10.0.3", + "make-fetch-happen": "^11.0.3", "nopt": "^6.0.0", "npmlog": "^6.0.0", "rimraf": "^3.0.2", @@ -9250,19 +9231,6 @@ "node": "^12.13 || ^14.13 || >=16" } }, - "node_modules/node-gyp/node_modules/@npmcli/fs": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-2.1.2.tgz", - "integrity": "sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==", - "inBundle": true, - "dependencies": { - "@gar/promisify": "^1.1.3", - "semver": "^7.3.5" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/node-gyp/node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -9292,87 +9260,6 @@ "concat-map": "0.0.1" } }, - "node_modules/node-gyp/node_modules/cacache": { - "version": "16.1.3", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.3.tgz", - "integrity": "sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ==", - "inBundle": true, - "dependencies": { - "@npmcli/fs": "^2.1.0", - "@npmcli/move-file": "^2.0.0", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^8.0.1", - "infer-owner": "^1.0.4", - "lru-cache": "^7.7.1", - "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^9.0.0", - "tar": "^6.1.11", - "unique-filename": "^2.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "inBundle": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/node-gyp/node_modules/cacache/node_modules/glob": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", - "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", - "inBundle": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^5.0.1", - "once": "^1.3.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/node-gyp/node_modules/cacache/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": 
"sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "inBundle": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/node-gyp/node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "inBundle": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/node-gyp/node_modules/gauge": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", @@ -9412,33 +9299,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/node-gyp/node_modules/make-fetch-happen": { - "version": "10.2.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz", - "integrity": "sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==", - "inBundle": true, - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^16.1.0", - "http-cache-semantics": "^4.1.0", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-fetch": "^2.0.3", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^9.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/node-gyp/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -9451,35 +9311,6 @@ "node": "*" } }, - "node_modules/node-gyp/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "inBundle": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/node-gyp/node_modules/minipass-fetch": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.1.2.tgz", - "integrity": "sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA==", - "inBundle": true, - "dependencies": { - "minipass": "^3.1.6", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, "node_modules/node-gyp/node_modules/nopt": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-6.0.0.tgz", @@ -9530,42 +9361,6 @@ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "inBundle": true }, - "node_modules/node-gyp/node_modules/ssri": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", - "integrity": "sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==", - "inBundle": true, - "dependencies": { - "minipass": "^3.1.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/unique-filename": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", - "integrity": "sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A==", - "inBundle": true, - "dependencies": { - "unique-slug": "^3.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/unique-slug": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-3.0.0.tgz", - "integrity": "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w==", - "inBundle": true, - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/node-gyp/node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/package.json b/package.json index c676a5fd2bba6..b8904d6c77494 100644 --- a/package.json +++ b/package.json @@ -90,7 +90,7 @@ "minipass": "^5.0.0", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", - "node-gyp": "^9.3.1", + "node-gyp": "^9.4.0", "nopt": "^7.1.0", "npm-audit-report": "^5.0.0", "npm-install-checks": "^6.1.1", From 0a7d473f9f1d59281405b81129d700410896972a Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:41:50 -0700 Subject: [PATCH 05/16] deps: pacote@15.2.0 --- node_modules/pacote/lib/fetcher.js | 3 ++- node_modules/pacote/lib/registry.js | 5 ++++- node_modules/pacote/lib/util/cache-dir.js | 5 ++++- node_modules/pacote/package.json | 2 +- package-lock.json | 8 ++++---- package.json | 2 +- 6 files changed, 16 insertions(+), 9 deletions(-) diff --git a/node_modules/pacote/lib/fetcher.js b/node_modules/pacote/lib/fetcher.js index 6694a57f6af36..f961a45c7d346 100644 --- a/node_modules/pacote/lib/fetcher.js +++ b/node_modules/pacote/lib/fetcher.js @@ -61,7 +61,8 @@ class FetcherBase { // by adding/modifying the integrity value. this.opts = { ...opts } - this.cache = opts.cache || cacheDir() + this.cache = opts.cache || cacheDir().cacache + this.tufCache = opts.tufCache || cacheDir().tufcache this.resolved = opts.resolved || null // default to caching/verifying with sha512, that's what we usually have diff --git a/node_modules/pacote/lib/registry.js b/node_modules/pacote/lib/registry.js index 625bedc9a7736..34d9b2b87f3f3 100644 --- a/node_modules/pacote/lib/registry.js +++ b/node_modules/pacote/lib/registry.js @@ -295,7 +295,10 @@ class RegistryFetcher extends Fetcher { // // Publish attestations are signed with a keyid so we need to // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys` - const options = { keySelector: publicKey ? () => publicKey.pemkey : undefined } + const options = { + tufCachePath: this.tufCache, + keySelector: publicKey ? () => publicKey.pemkey : undefined, + } await sigstore.verify(bundle, null, options) } catch (e) { throw Object.assign(new Error( diff --git a/node_modules/pacote/lib/util/cache-dir.js b/node_modules/pacote/lib/util/cache-dir.js index 4236213edd409..ac83b1793f199 100644 --- a/node_modules/pacote/lib/util/cache-dir.js +++ b/node_modules/pacote/lib/util/cache-dir.js @@ -8,5 +8,8 @@ module.exports = (fakePlatform = false) => { const platform = fakePlatform || process.platform const cacheExtra = platform === 'win32' ? 
'npm-cache' : '.npm' const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home - return resolve(cacheRoot, cacheExtra, '_cacache') + return { + cacache: resolve(cacheRoot, cacheExtra, '_cacache'), + tufcache: resolve(cacheRoot, cacheExtra, '_tuf'), + } } diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index 48f2bb0ac3fee..bc8d984704af5 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "15.1.3", + "version": "15.2.0", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { diff --git a/package-lock.json b/package-lock.json index cd11ee1e959d5..7651a93dfd2ac 100644 --- a/package-lock.json +++ b/package-lock.json @@ -134,7 +134,7 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^15.1.3", + "pacote": "^15.2.0", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", @@ -10098,9 +10098,9 @@ } }, "node_modules/pacote": { - "version": "15.1.3", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-15.1.3.tgz", - "integrity": "sha512-aRts8cZqxiJVDitmAh+3z+FxuO3tLNWEmwDRPEpDDiZJaRz06clP4XX112ynMT5uF0QNoMPajBBHnaStUEPJXA==", + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-15.2.0.tgz", + "integrity": "sha512-rJVZeIwHTUta23sIZgEIM62WYwbmGbThdbnkt81ravBplQv+HjyroqnLRNH2+sLJHcGZmLRmhPwACqhfTcOmnA==", "inBundle": true, "dependencies": { "@npmcli/git": "^4.0.0", diff --git a/package.json b/package.json index b8904d6c77494..e88777c450b98 100644 --- a/package.json +++ b/package.json @@ -101,7 +101,7 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^15.1.3", + "pacote": "^15.2.0", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", From 899ce1b839a4fd70c3ef2f5848fc56e0dd3f1be1 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:42:47 -0700 Subject: [PATCH 06/16] deps: nopt@7.2.0 --- node_modules/nopt/bin/nopt.js | 41 +---- node_modules/nopt/lib/debug.js | 6 +- node_modules/nopt/lib/nopt-lib.js | 249 +++++++++++++++++------------ node_modules/nopt/lib/nopt.js | 6 +- node_modules/nopt/lib/type-defs.js | 16 +- node_modules/nopt/package.json | 10 +- package-lock.json | 8 +- package.json | 2 +- 8 files changed, 179 insertions(+), 159 deletions(-) diff --git a/node_modules/nopt/bin/nopt.js b/node_modules/nopt/bin/nopt.js index bb04291c607ac..6ed2082064b5e 100755 --- a/node_modules/nopt/bin/nopt.js +++ b/node_modules/nopt/bin/nopt.js @@ -1,7 +1,8 @@ #!/usr/bin/env node -var nopt = require('../lib/nopt') -var path = require('path') -var types = { num: Number, +const nopt = require('../lib/nopt') +const path = require('path') +console.log('parsed', nopt({ + num: Number, bool: Boolean, help: Boolean, list: Array, @@ -13,8 +14,8 @@ var types = { num: Number, config: Boolean, length: Number, file: path, -} -var shorthands = { s: ['--str', 'astring'], +}, { + s: ['--str', 'astring'], b: ['--bool'], nb: ['--no-bool'], tft: ['--bool-list', '--no-bool-list', '--bool-list', 'true'], @@ -25,32 +26,4 @@ var shorthands = { s: ['--str', 'astring'], c: ['--config'], l: ['--length'], f: ['--file'], -} -var parsed = nopt(types - , shorthands - , process.argv - , 2) - -console.log('parsed', parsed) - -if (parsed.help) { - console.log('') - console.log('nopt cli tester') - console.log('') - console.log('types') - console.log(Object.keys(types).map(function M (t) { - var type = types[t] - if 
(Array.isArray(type)) { - return [t, type.map(function (mappedType) { - return mappedType.name - })] - } - return [t, type && type.name] - }).reduce(function (s, i) { - s[i[0]] = i[1] - return s - }, {})) - console.log('') - console.log('shorthands') - console.log(shorthands) -} +}, process.argv, 2)) diff --git a/node_modules/nopt/lib/debug.js b/node_modules/nopt/lib/debug.js index 194d0c6881882..e62198eb92e9a 100644 --- a/node_modules/nopt/lib/debug.js +++ b/node_modules/nopt/lib/debug.js @@ -1,6 +1,4 @@ /* istanbul ignore next */ module.exports = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG - ? function () { - console.error.apply(console, arguments) - } - : function () {} + ? (...a) => console.error(...a) + : () => {} diff --git a/node_modules/nopt/lib/nopt-lib.js b/node_modules/nopt/lib/nopt-lib.js index 89d269fb43f1a..d3d1de0255ba9 100644 --- a/node_modules/nopt/lib/nopt-lib.js +++ b/node_modules/nopt/lib/nopt-lib.js @@ -1,21 +1,47 @@ -var abbrev = require('abbrev') +const abbrev = require('abbrev') const debug = require('./debug') const defaultTypeDefs = require('./type-defs') -function nopt (args, { types, shorthands, typeDefs, invalidHandler }) { +const hasOwn = (o, k) => Object.prototype.hasOwnProperty.call(o, k) + +const getType = (k, { types, dynamicTypes }) => { + let hasType = hasOwn(types, k) + let type = types[k] + if (!hasType && typeof dynamicTypes === 'function') { + const matchedType = dynamicTypes(k) + if (matchedType !== undefined) { + type = matchedType + hasType = true + } + } + return [hasType, type] +} + +const isTypeDef = (type, def) => def && type === def +const hasTypeDef = (type, def) => def && type.indexOf(def) !== -1 +const doesNotHaveTypeDef = (type, def) => def && !hasTypeDef(type, def) + +function nopt (args, { + types, + shorthands, + typeDefs, + invalidHandler, + typeDefault, + dynamicTypes, +} = {}) { debug(types, shorthands, args, typeDefs) - var data = {} - var argv = { + const data = {} + const argv = { remain: [], cooked: args, original: args.slice(0), } - parse(args, data, argv.remain, { typeDefs, types, shorthands }) + parse(args, data, argv.remain, { typeDefs, types, dynamicTypes, shorthands }) // now data is full - clean(data, { types, typeDefs, invalidHandler }) + clean(data, { types, dynamicTypes, typeDefs, invalidHandler, typeDefault }) data.argv = argv Object.defineProperty(data.argv, 'toString', { @@ -28,30 +54,48 @@ function nopt (args, { types, shorthands, typeDefs, invalidHandler }) { return data } -function clean (data, { types, typeDefs, invalidHandler }) { - const StringType = typeDefs.String.type - const NumberType = typeDefs.Number.type - const ArrayType = typeDefs.Array.type - const BooleanType = typeDefs.Boolean.type - const DateType = typeDefs.Date.type +function clean (data, { + types = {}, + typeDefs = {}, + dynamicTypes, + invalidHandler, + typeDefault, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type + const DateType = typeDefs.Date?.type + + const hasTypeDefault = typeof typeDefault !== 'undefined' + if (!hasTypeDefault) { + typeDefault = [false, true, null] + if (StringType) { + typeDefault.push(StringType) + } + if (ArrayType) { + typeDefault.push(ArrayType) + } + } - var remove = {} - var typeDefault = [false, true, null, StringType, ArrayType] + const remove = {} - Object.keys(data).forEach(function (k) { + Object.keys(data).forEach((k) => { if (k === 'argv') { return } - var val = 
data[k] - var isArray = Array.isArray(val) - var type = types[k] + let val = data[k] + debug('val=%j', val) + const isArray = Array.isArray(val) + let [hasType, rawType] = getType(k, { types, dynamicTypes }) + let type = rawType if (!isArray) { val = [val] } if (!type) { type = typeDefault } - if (type === ArrayType) { + if (isTypeDef(type, ArrayType)) { type = typeDefault.concat(ArrayType) } if (!Array.isArray(type)) { @@ -60,57 +104,62 @@ function clean (data, { types, typeDefs, invalidHandler }) { debug('val=%j', val) debug('types=', type) - val = val.map(function (v) { + val = val.map((v) => { // if it's an unknown value, then parse false/true/null/numbers/dates if (typeof v === 'string') { debug('string %j', v) v = v.trim() if ((v === 'null' && ~type.indexOf(null)) || (v === 'true' && - (~type.indexOf(true) || ~type.indexOf(BooleanType))) + (~type.indexOf(true) || hasTypeDef(type, BooleanType))) || (v === 'false' && - (~type.indexOf(false) || ~type.indexOf(BooleanType)))) { + (~type.indexOf(false) || hasTypeDef(type, BooleanType)))) { v = JSON.parse(v) debug('jsonable %j', v) - } else if (~type.indexOf(NumberType) && !isNaN(v)) { + } else if (hasTypeDef(type, NumberType) && !isNaN(v)) { debug('convert to number', v) v = +v - } else if (~type.indexOf(DateType) && !isNaN(Date.parse(v))) { + } else if (hasTypeDef(type, DateType) && !isNaN(Date.parse(v))) { debug('convert to date', v) v = new Date(v) } } - if (!Object.prototype.hasOwnProperty.call(types, k)) { - return v + if (!hasType) { + if (!hasTypeDefault) { + return v + } + // if the default type has been passed in then we want to validate the + // unknown data key instead of bailing out earlier. we also set the raw + // type which is passed to the invalid handler so that it can be + // determined if during validation if it is unknown vs invalid + rawType = typeDefault } // allow `--no-blah` to set 'blah' to null if null is allowed if (v === false && ~type.indexOf(null) && - !(~type.indexOf(false) || ~type.indexOf(BooleanType))) { + !(~type.indexOf(false) || hasTypeDef(type, BooleanType))) { v = null } - var d = {} + const d = {} d[k] = v - debug('prevalidated val', d, v, types[k]) - if (!validate(d, k, v, types[k], { typeDefs })) { + debug('prevalidated val', d, v, rawType) + if (!validate(d, k, v, rawType, { typeDefs })) { if (invalidHandler) { - invalidHandler(k, v, types[k], data) + invalidHandler(k, v, rawType, data) } else if (invalidHandler !== false) { - debug('invalid: ' + k + '=' + v, types[k]) + debug('invalid: ' + k + '=' + v, rawType) } return remove } - debug('validated v', d, v, types[k]) + debug('validated v', d, v, rawType) return d[k] - }).filter(function (v) { - return v !== remove - }) + }).filter((v) => v !== remove) // if we allow Array specifically, then an empty array is how we // express 'no value here', not null. Allow it. - if (!val.length && type.indexOf(ArrayType) === -1) { + if (!val.length && doesNotHaveTypeDef(type, ArrayType)) { debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(ArrayType)) delete data[k] } else if (isArray) { @@ -124,12 +173,12 @@ function clean (data, { types, typeDefs, invalidHandler }) { }) } -function validate (data, k, val, type, { typeDefs }) { - const ArrayType = typeDefs.Array.type +function validate (data, k, val, type, { typeDefs } = {}) { + const ArrayType = typeDefs?.Array?.type // arrays are lists of types. 
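  // A minimal usage sketch (hypothetical option name): declaring
  //   nopt({ tag: [String, Array] }, {}, ['--tag', 'a', '--tag', 'b'], 0)
  // validates each element against String and collects the repeats,
  // so data.tag ends up as ['a', 'b'].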
if (Array.isArray(type)) { for (let i = 0, l = type.length; i < l; i++) { - if (type[i] === ArrayType) { + if (isTypeDef(type[i], ArrayType)) { continue } if (validate(data, k, val, type[i], { typeDefs })) { @@ -141,7 +190,7 @@ function validate (data, k, val, type, { typeDefs }) { } // an array of anything? - if (type === ArrayType) { + if (isTypeDef(type, ArrayType)) { return true } @@ -166,17 +215,17 @@ function validate (data, k, val, type, { typeDefs }) { } // now go through the list of typeDefs, validate against each one. - var ok = false - var types = Object.keys(typeDefs) + let ok = false + const types = Object.keys(typeDefs) for (let i = 0, l = types.length; i < l; i++) { debug('test type %j %j %j', k, val, types[i]) - var t = typeDefs[types[i]] + const t = typeDefs[types[i]] if (t && ( (type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type) )) { - var d = {} + const d = {} ok = t.validate(d, k, val) !== false val = d[k] if (ok) { @@ -193,19 +242,25 @@ function validate (data, k, val, type, { typeDefs }) { return ok } -function parse (args, data, remain, { typeDefs, types, shorthands }) { - const StringType = typeDefs.String.type - const NumberType = typeDefs.String.type - const ArrayType = typeDefs.Array.type - const BooleanType = typeDefs.Boolean.type +function parse (args, data, remain, { + types = {}, + typeDefs = {}, + shorthands = {}, + dynamicTypes, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type debug('parse', args, data, remain) - var abbrevs = abbrev(Object.keys(types)) - var shortAbbr = abbrev(Object.keys(shorthands)) + const abbrevs = abbrev(Object.keys(types)) + debug('abbrevs=%j', abbrevs) + const shortAbbr = abbrev(Object.keys(shorthands)) - for (var i = 0; i < args.length; i++) { - var arg = args[i] + for (let i = 0; i < args.length; i++) { + let arg = args[i] debug('arg', arg) if (arg.match(/^-{2,}$/)) { @@ -215,22 +270,21 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { args[i] = '--' break } - var hadEq = false + let hadEq = false if (arg.charAt(0) === '-' && arg.length > 1) { - var at = arg.indexOf('=') + const at = arg.indexOf('=') if (at > -1) { hadEq = true - var v = arg.slice(at + 1) + const v = arg.slice(at + 1) arg = arg.slice(0, at) args.splice(i, 1, arg, v) } // see if it's a shorthand // if so, splice and back up to re-parse it. 
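      // A hedged expansion sketch, using the shorthands declared in
      // bin/nopt.js above: given { s: ['--str', 'astring'] }, the token
      // '-s' is spliced into args as '--str', 'astring', and the loop
      // backs up so the expanded tokens are parsed normally.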
- var shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands }) + const shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands }) debug('arg=%j shRes=%j', arg, shRes) if (shRes) { - debug(arg, shRes) args.splice.apply(args, [i, 1].concat(shRes)) if (arg !== shRes[0]) { i-- @@ -238,7 +292,7 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { } } arg = arg.replace(/^-+/, '') - var no = null + let no = null while (arg.toLowerCase().indexOf('no-') === 0) { no = !no arg = arg.slice(3) @@ -248,33 +302,30 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { arg = abbrevs[arg] } - var argType = types[arg] - var isTypeArray = Array.isArray(argType) + let [hasType, argType] = getType(arg, { types, dynamicTypes }) + let isTypeArray = Array.isArray(argType) if (isTypeArray && argType.length === 1) { isTypeArray = false argType = argType[0] } - var isArray = argType === ArrayType || - isTypeArray && argType.indexOf(ArrayType) !== -1 + let isArray = isTypeDef(argType, ArrayType) || + isTypeArray && hasTypeDef(argType, ArrayType) // allow unknown things to be arrays if specified multiple times. - if ( - !Object.prototype.hasOwnProperty.call(types, arg) && - Object.prototype.hasOwnProperty.call(data, arg) - ) { + if (!hasType && hasOwn(data, arg)) { if (!Array.isArray(data[arg])) { data[arg] = [data[arg]] } isArray = true } - var val - var la = args[i + 1] + let val + let la = args[i + 1] - var isBool = typeof no === 'boolean' || - argType === BooleanType || - isTypeArray && argType.indexOf(BooleanType) !== -1 || + const isBool = typeof no === 'boolean' || + isTypeDef(argType, BooleanType) || + isTypeArray && hasTypeDef(argType, BooleanType) || (typeof argType === 'undefined' && !hadEq) || (la === 'false' && (argType === null || @@ -305,11 +356,11 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { i++ } else if (!la.match(/^-{2,}[^-]/) && !isNaN(la) && - ~argType.indexOf(NumberType)) { + hasTypeDef(argType, NumberType)) { // number val = +la i++ - } else if (!la.match(/^-[^-]/) && ~argType.indexOf(StringType)) { + } else if (!la.match(/^-[^-]/) && hasTypeDef(argType, StringType)) { // string val = la i++ @@ -325,7 +376,7 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { continue } - if (argType === StringType) { + if (isTypeDef(argType, StringType)) { if (la === undefined) { la = '' } else if (la.match(/^-{1,2}[^-]+/)) { @@ -353,7 +404,26 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { } } -function resolveShort (arg, shortAbbr, abbrevs, { shorthands }) { +const SINGLES = Symbol('singles') +const singleCharacters = (arg, shorthands) => { + let singles = shorthands[SINGLES] + if (!singles) { + singles = Object.keys(shorthands).filter((s) => s.length === 1).reduce((l, r) => { + l[r] = true + return l + }, {}) + shorthands[SINGLES] = singles + debug('shorthand singles', singles) + } + const chrs = arg.split('').filter((c) => singles[c]) + return chrs.join('') === arg ? chrs : null +} + +function resolveShort (arg, ...rest) { + const { types = {}, shorthands = {} } = rest.length ? rest.pop() : {} + const shortAbbr = rest[0] ?? abbrev(Object.keys(shorthands)) + const abbrevs = rest[1] ?? 
abbrev(Object.keys(types)) + // handle single-char shorthands glommed together, like // npm ls -glp, but only if there is one dash, and only if // all of the chars are single-char shorthands, and it's @@ -376,28 +446,9 @@ function resolveShort (arg, shortAbbr, abbrevs, { shorthands }) { } // first check to see if this arg is a set of single-char shorthands - var singles = shorthands.___singles - if (!singles) { - singles = Object.keys(shorthands).filter(function (s) { - return s.length === 1 - }).reduce(function (l, r) { - l[r] = true - return l - }, {}) - shorthands.___singles = singles - debug('shorthand singles', singles) - } - - var chrs = arg.split('').filter(function (c) { - return singles[c] - }) - - if (chrs.join('') === arg) { - return chrs.map(function (c) { - return shorthands[c] - }).reduce(function (l, r) { - return l.concat(r) - }, []) + const chrs = singleCharacters(arg, shorthands) + if (chrs) { + return chrs.map((c) => shorthands[c]).reduce((l, r) => l.concat(r), []) } // if it's an arg abbrev, and not a literal shorthand, then prefer the arg diff --git a/node_modules/nopt/lib/nopt.js b/node_modules/nopt/lib/nopt.js index 70fd809bc111e..37f01a08783f8 100644 --- a/node_modules/nopt/lib/nopt.js +++ b/node_modules/nopt/lib/nopt.js @@ -12,9 +12,9 @@ exports.clean = clean exports.typeDefs = defaultTypeDefs exports.lib = lib -function nopt (types = {}, shorthands = {}, args = process.argv, slice = 2) { +function nopt (types, shorthands, args = process.argv, slice = 2) { return lib.nopt(args.slice(slice), { - types, + types: types || {}, shorthands: shorthands || {}, typeDefs: exports.typeDefs, invalidHandler: exports.invalidHandler, @@ -23,7 +23,7 @@ function nopt (types = {}, shorthands = {}, args = process.argv, slice = 2) { function clean (data, types, typeDefs = exports.typeDefs) { return lib.clean(data, { - types, + types: types || {}, typeDefs, invalidHandler: exports.invalidHandler, }) diff --git a/node_modules/nopt/lib/type-defs.js b/node_modules/nopt/lib/type-defs.js index 6acf5e0a5b9d4..608352ee248cc 100644 --- a/node_modules/nopt/lib/type-defs.js +++ b/node_modules/nopt/lib/type-defs.js @@ -1,7 +1,7 @@ -var url = require('url') -var path = require('path') -var Stream = require('stream').Stream -var os = require('os') +const url = require('url') +const path = require('path') +const Stream = require('stream').Stream +const os = require('os') const debug = require('./debug') function validateString (data, k, val) { @@ -18,9 +18,9 @@ function validatePath (data, k, val) { val = String(val) - var isWin = process.platform === 'win32' - var homePattern = isWin ? /^~(\/|\\)/ : /^~\// - var home = os.homedir() + const isWin = process.platform === 'win32' + const homePattern = isWin ? /^~(\/|\\)/ : /^~\// + const home = os.homedir() if (home && val.match(homePattern)) { data[k] = path.resolve(home, val.slice(2)) @@ -39,7 +39,7 @@ function validateNumber (data, k, val) { } function validateDate (data, k, val) { - var s = Date.parse(val) + const s = Date.parse(val) debug('validate Date %j %j %j', k, val, s) if (isNaN(s)) { return false diff --git a/node_modules/nopt/package.json b/node_modules/nopt/package.json index a61cae4535f88..01b7de8fed934 100644 --- a/node_modules/nopt/package.json +++ b/node_modules/nopt/package.json @@ -1,6 +1,6 @@ { "name": "nopt", - "version": "7.1.0", + "version": "7.2.0", "description": "Option parsing for Node, supporting types, shorthands, etc. 
Used by npm.", "author": "GitHub Inc.", "main": "lib/nopt.js", @@ -26,13 +26,10 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.12.0", + "@npmcli/template-oss": "4.15.1", "tap": "^16.3.0" }, "tap": { - "lines": 91, - "branches": 87, - "statements": 91, "nyc-arg": [ "--exclude", "tap-snapshots/**" @@ -48,6 +45,7 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.12.0" + "version": "4.15.1", + "publish": true } } diff --git a/package-lock.json b/package-lock.json index 7651a93dfd2ac..39aca97239c4a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -124,7 +124,7 @@ "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^9.4.0", - "nopt": "^7.1.0", + "nopt": "^7.2.0", "npm-audit-report": "^5.0.0", "npm-install-checks": "^6.1.1", "npm-package-arg": "^10.1.0", @@ -9405,9 +9405,9 @@ "dev": true }, "node_modules/nopt": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.1.0.tgz", - "integrity": "sha512-ZFPLe9Iu0tnx7oWhFxAo4s7QTn8+NNDDxYNaKLjE7Dp0tbakQ3M1QhQzsnzXHQBTUO3K9BmwaxnyO8Ayn2I95Q==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.0.tgz", + "integrity": "sha512-CVDtwCdhYIvnAzFoJ6NJ6dX3oga9/HyciQDnG1vQDjSLMeKLJ4A93ZqYKDrgYSr1FBY5/hMYC+2VCi24pgpkGA==", "inBundle": true, "dependencies": { "abbrev": "^2.0.0" diff --git a/package.json b/package.json index e88777c450b98..7997f427f7e60 100644 --- a/package.json +++ b/package.json @@ -91,7 +91,7 @@ "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^9.4.0", - "nopt": "^7.1.0", + "nopt": "^7.2.0", "npm-audit-report": "^5.0.0", "npm-install-checks": "^6.1.1", "npm-package-arg": "^10.1.0", From 24fffe7219eed918ff55f4c1cae48f6afb4296b5 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:43:54 -0700 Subject: [PATCH 07/16] deps: semver@7.5.2 --- node_modules/semver/classes/comparator.js | 3 +- node_modules/semver/classes/range.js | 64 +++++++++++++---------- node_modules/semver/classes/semver.js | 8 +-- node_modules/semver/functions/coerce.js | 2 +- node_modules/semver/functions/diff.js | 51 +++++++++++------- node_modules/semver/internal/re.js | 11 ++++ node_modules/semver/package.json | 8 +-- package-lock.json | 8 +-- package.json | 2 +- 9 files changed, 96 insertions(+), 61 deletions(-) diff --git a/node_modules/semver/classes/comparator.js b/node_modules/semver/classes/comparator.js index 2146c884bd937..3d39c0eef7802 100644 --- a/node_modules/semver/classes/comparator.js +++ b/node_modules/semver/classes/comparator.js @@ -16,6 +16,7 @@ class Comparator { } } + comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose @@ -133,7 +134,7 @@ class Comparator { module.exports = Comparator const parseOptions = require('../internal/parse-options') -const { re, t } = require('../internal/re') +const { safeRe: re, t } = require('../internal/re') const cmp = require('../functions/cmp') const debug = require('../internal/debug') const SemVer = require('./semver') diff --git a/node_modules/semver/classes/range.js b/node_modules/semver/classes/range.js index d9e866de4dcd1..53c2540fd012e 100644 --- a/node_modules/semver/classes/range.js +++ b/node_modules/semver/classes/range.js @@ -26,19 +26,26 @@ class Range { this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease - // First, split based on boolean or || + // First reduce 
all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. this.raw = range - this.set = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw .split('||') // map the range to a 2d array of comparators - .map(r => this.parseRange(r.trim())) + .map(r => this.parseRange(r)) // throw out any comparator lists that are empty // this generally means that it was not a valid range, which is allowed // in loose mode, but will still throw if the WHOLE range is invalid. .filter(c => c.length) if (!this.set.length) { - throw new TypeError(`Invalid SemVer Range: ${range}`) + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) } // if we have any that are not the null set, throw out null sets. @@ -64,9 +71,7 @@ class Range { format () { this.range = this.set - .map((comps) => { - return comps.join(' ').trim() - }) + .map((comps) => comps.join(' ').trim()) .join('||') .trim() return this.range @@ -77,8 +82,6 @@ class Range { } parseRange (range) { - range = range.trim() - // memoize range parsing for performance. // this is a very hot path, and fully deterministic. const memoOpts = @@ -105,9 +108,6 @@ class Range { // `^ 1.2.3` => `^1.2.3` range = range.replace(re[t.CARETTRIM], caretTrimReplace) - // normalize spaces - range = range.split(/\s+/).join(' ') - // At this point, the range is completely trimmed and // ready to be split into comparators. @@ -203,7 +203,7 @@ const Comparator = require('./comparator') const debug = require('../internal/debug') const SemVer = require('./semver') const { - re, + safeRe: re, t, comparatorTrimReplace, tildeTrimReplace, @@ -257,10 +257,13 @@ const isX = id => !id || id.toLowerCase() === 'x' || id === '*' // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 // ~0.0.1 --> >=0.0.1 <0.1.0-0 -const replaceTildes = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceTilde(c, options) - }).join(' ') +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} const replaceTilde = (comp, options) => { const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] @@ -298,10 +301,13 @@ const replaceTilde = (comp, options) => { // ^1.2.0 --> >=1.2.0 <2.0.0-0 // ^0.0.1 --> >=0.0.1 <0.0.2-0 // ^0.1.0 --> >=0.1.0 <0.2.0-0 -const replaceCarets = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceCaret(c, options) - }).join(' ') +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} const replaceCaret = (comp, options) => { debug('caret', comp, options) @@ -358,9 +364,10 @@ const replaceCaret = (comp, options) => { const replaceXRanges = (comp, options) => { debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map((c) => { - return replaceXRange(c, options) - }).join(' ') + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') } const replaceXRange = (comp, options) => { @@ -443,12 +450,15 @@ const replaceXRange = (comp, options) => { const replaceStars = (comp, options) => { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! 
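  // e.g. replaceStars('*') reduces to '', the comparator that matches any
  // version (prerelease versions still require the includePrerelease option).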
- return comp.trim().replace(re[t.STAR], '') + return comp + .trim() + .replace(re[t.STAR], '') } const replaceGTE0 = (comp, options) => { debug('replaceGTE0', comp, options) - return comp.trim() + return comp + .trim() .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') } @@ -486,7 +496,7 @@ const hyphenReplace = incPr => ($0, to = `<=${to}` } - return (`${from} ${to}`).trim() + return `${from} ${to}`.trim() } const testSet = (set, version, options) => { diff --git a/node_modules/semver/classes/semver.js b/node_modules/semver/classes/semver.js index 99dbe82db4dc5..84e84590e3871 100644 --- a/node_modules/semver/classes/semver.js +++ b/node_modules/semver/classes/semver.js @@ -1,6 +1,6 @@ const debug = require('../internal/debug') const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') -const { re, t } = require('../internal/re') +const { safeRe: re, t } = require('../internal/re') const parseOptions = require('../internal/parse-options') const { compareIdentifiers } = require('../internal/identifiers') @@ -291,8 +291,10 @@ class SemVer { default: throw new Error(`invalid increment argument: ${release}`) } - this.format() - this.raw = this.version + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } return this } } diff --git a/node_modules/semver/functions/coerce.js b/node_modules/semver/functions/coerce.js index 2e01452fddad6..febbff9c273ce 100644 --- a/node_modules/semver/functions/coerce.js +++ b/node_modules/semver/functions/coerce.js @@ -1,6 +1,6 @@ const SemVer = require('../classes/semver') const parse = require('./parse') -const { re, t } = require('../internal/re') +const { safeRe: re, t } = require('../internal/re') const coerce = (version, options) => { if (version instanceof SemVer) { diff --git a/node_modules/semver/functions/diff.js b/node_modules/semver/functions/diff.js index fafc11c40d345..fc224e302c0e4 100644 --- a/node_modules/semver/functions/diff.js +++ b/node_modules/semver/functions/diff.js @@ -13,6 +13,35 @@ const diff = (version1, version2) => { const highVersion = v1Higher ? v1 : v2 const lowVersion = v1Higher ? v2 : v1 const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } // add the `pre` prefix if we are going to a prerelease version const prefix = highHasPre ? 
'pre' : '' @@ -29,26 +58,8 @@ const diff = (version1, version2) => { return prefix + 'patch' } - // at this point we know stable versions match but overall versions are not equal, - // so either they are both prereleases, or the lower version is a prerelease - - if (highHasPre) { - // high and low are preleases - return 'prerelease' - } - - if (lowVersion.patch) { - // anything higher than a patch bump would result in the wrong version - return 'patch' - } - - if (lowVersion.minor) { - // anything higher than a minor bump would result in the wrong version - return 'minor' - } - - // bumping major/minor/patch all have same result - return 'major' + // high and low are preleases + return 'prerelease' } module.exports = diff diff --git a/node_modules/semver/internal/re.js b/node_modules/semver/internal/re.js index ed88398a9dbf5..f73ef1aa06263 100644 --- a/node_modules/semver/internal/re.js +++ b/node_modules/semver/internal/re.js @@ -4,16 +4,27 @@ exports = module.exports = {} // The actual regexps go on exports.re const re = exports.re = [] +const safeRe = exports.safeRe = [] const src = exports.src = [] const t = exports.t = {} let R = 0 const createToken = (name, value, isGlobal) => { + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + const safe = value + .split('\\s*').join('\\s{0,1}') + .split('\\s+').join('\\s') const index = R++ debug(name, index, value) t[name] = index src[index] = value re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) } // The following Regular Expressions can be used for tokenizing, diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json index 592404a3c9d1c..7d0aff3c03c27 100644 --- a/node_modules/semver/package.json +++ b/node_modules/semver/package.json @@ -1,6 +1,6 @@ { "name": "semver", - "version": "7.5.1", + "version": "7.5.2", "description": "The semantic version parser used by npm.", "main": "index.js", "scripts": { @@ -14,7 +14,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.0" }, "license": "ISC", @@ -37,7 +37,7 @@ "range.bnf" ], "tap": { - "check-coverage": true, + "timeout": 30, "coverage-map": "map.js", "nyc-arg": [ "--exclude", @@ -53,7 +53,7 @@ "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", + "version": "4.15.1", "engines": ">=10", "ciVersions": [ "10.0.0", diff --git a/package-lock.json b/package-lock.json index 39aca97239c4a..94a29d5db33f8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -139,7 +139,7 @@ "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", "read": "^2.1.0", - "semver": "^7.5.1", + "semver": "^7.5.2", "sigstore": "^1.5.0", "ssri": "^10.0.4", "supports-color": "^9.3.1", @@ -11366,9 +11366,9 @@ } }, "node_modules/semver": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", - "integrity": "sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.2.tgz", + "integrity": "sha512-SoftuTROv/cRjCze/scjGyiDtcUyxw1rgYQSZY7XTmtR5hX+dm76iDbTH8TkLPHCQmlbQVSSbNZCPM2hb0knnQ==", "inBundle": true, "dependencies": { "lru-cache": "^6.0.0" diff --git a/package.json b/package.json index 7997f427f7e60..fd2ef9c34e7c4 100644 --- a/package.json +++ b/package.json @@ -106,7 +106,7 @@ "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", "read": "^2.1.0", - "semver": "^7.5.1", + "semver": "^7.5.2", "sigstore": "^1.5.0", "ssri": "^10.0.4", "supports-color": "^9.3.1", From eb14bfc51635c9c509c85205b21b998bbf3c28a6 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:44:55 -0700 Subject: [PATCH 08/16] deps: sigstore@1.6.0 --- DEPENDENCIES.md | 4 + node_modules/.gitignore | 1 + node_modules/@sigstore/tuf/LICENSE | 202 ++++++++++++++++++ .../util => @sigstore/tuf/dist}/appdata.js | 18 ++ .../index.js => @sigstore/tuf/dist/client.js} | 47 +--- node_modules/@sigstore/tuf/dist/error.js | 12 ++ node_modules/@sigstore/tuf/dist/index.js | 55 +++++ .../dist/tuf => @sigstore/tuf/dist}/target.js | 10 +- node_modules/@sigstore/tuf/package.json | 45 ++++ .../tuf/store/public-good-instance-root.json | 1 + node_modules/sigstore/dist/external/rekor.js | 4 +- node_modules/sigstore/dist/index.js | 15 -- node_modules/sigstore/dist/sigstore.js | 27 ++- node_modules/sigstore/dist/tlog/format.js | 55 +++-- .../tlog/types/__generated__/hashedrekord.js | 8 - .../dist/tlog/types/__generated__/intoto.js | 8 - .../sigstore/dist/tlog/types/index.js | 5 - .../sigstore/dist/tlog/verify/body.js | 39 ++++ .../sigstore/dist/types/sigstore/index.js | 3 +- .../sigstore/dist/{x509 => util}/asn1/dump.js | 0 .../dist/{x509 => util}/asn1/error.js | 0 node_modules/sigstore/dist/util/asn1/index.js | 20 ++ .../dist/{x509 => util}/asn1/length.js | 0 .../sigstore/dist/{x509 => util}/asn1/obj.js | 7 +- .../dist/{x509 => util}/asn1/parse.js | 0 .../sigstore/dist/{x509 => util}/asn1/tag.js | 0 node_modules/sigstore/dist/util/index.js | 4 +- node_modules/sigstore/dist/util/stream.js | 2 +- node_modules/sigstore/dist/x509/cert.js | 4 +- node_modules/sigstore/dist/x509/ext.js | 15 -- node_modules/sigstore/package.json | 11 +- package-lock.json | 23 +- package.json | 2 +- 33 files changed, 510 insertions(+), 137 deletions(-) create mode 100644 node_modules/@sigstore/tuf/LICENSE rename node_modules/{sigstore/dist/util => @sigstore/tuf/dist}/appdata.js (61%) rename node_modules/{sigstore/dist/tuf/index.js => @sigstore/tuf/dist/client.js} (63%) create mode 100644 node_modules/@sigstore/tuf/dist/error.js create mode 100644 node_modules/@sigstore/tuf/dist/index.js rename node_modules/{sigstore/dist/tuf => @sigstore/tuf/dist}/target.js (92%) create mode 100644 node_modules/@sigstore/tuf/package.json create mode 100644 
node_modules/@sigstore/tuf/store/public-good-instance-root.json delete mode 100644 node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js delete mode 100644 node_modules/sigstore/dist/tlog/types/__generated__/intoto.js delete mode 100644 node_modules/sigstore/dist/tlog/types/index.js rename node_modules/sigstore/dist/{x509 => util}/asn1/dump.js (100%) rename node_modules/sigstore/dist/{x509 => util}/asn1/error.js (100%) create mode 100644 node_modules/sigstore/dist/util/asn1/index.js rename node_modules/sigstore/dist/{x509 => util}/asn1/length.js (100%) rename node_modules/sigstore/dist/{x509 => util}/asn1/obj.js (95%) rename node_modules/sigstore/dist/{x509 => util}/asn1/parse.js (100%) rename node_modules/sigstore/dist/{x509 => util}/asn1/tag.js (100%) diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index d14469bdc56ff..95ef9a66dffaa 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -770,7 +770,11 @@ graph LR; shebang-command-->shebang-regex; sigstore-->make-fetch-happen; sigstore-->sigstore-protobuf-specs["@sigstore/protobuf-specs"]; + sigstore-->sigstore-tuf["@sigstore/tuf"]; sigstore-->tuf-js; + sigstore-tuf-->make-fetch-happen; + sigstore-tuf-->sigstore-protobuf-specs["@sigstore/protobuf-specs"]; + sigstore-tuf-->tuf-js; socks-->ip; socks-->smart-buffer; socks-proxy-agent-->agent-base; diff --git a/node_modules/.gitignore b/node_modules/.gitignore index a96ea88cb87dd..a32136b521357 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -38,6 +38,7 @@ !/@sigstore/ /@sigstore/* !/@sigstore/protobuf-specs +!/@sigstore/tuf !/@tootallnate/ /@tootallnate/* !/@tootallnate/once diff --git a/node_modules/@sigstore/tuf/LICENSE b/node_modules/@sigstore/tuf/LICENSE new file mode 100644 index 0000000000000..e9e7c1679a09d --- /dev/null +++ b/node_modules/@sigstore/tuf/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/sigstore/dist/util/appdata.js b/node_modules/@sigstore/tuf/dist/appdata.js similarity index 61% rename from node_modules/sigstore/dist/util/appdata.js rename to node_modules/@sigstore/tuf/dist/appdata.js index d0c7f6f079e50..c9a8ee92b531e 100644 --- a/node_modules/sigstore/dist/util/appdata.js +++ b/node_modules/@sigstore/tuf/dist/appdata.js @@ -4,19 +4,37 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.appDataPath = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ const os_1 = __importDefault(require("os")); const path_1 = __importDefault(require("path")); function appDataPath(name) { const homedir = os_1.default.homedir(); switch (process.platform) { + /* istanbul ignore next */ case 'darwin': { const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); return path_1.default.join(appSupport, name); } + /* istanbul ignore next */ case 'win32': { const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); return path_1.default.join(localAppData, name, 'Data'); } + /* istanbul ignore next */ default: { const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); return path_1.default.join(localData, name); diff --git a/node_modules/sigstore/dist/tuf/index.js b/node_modules/@sigstore/tuf/dist/client.js similarity index 63% rename from node_modules/sigstore/dist/tuf/index.js rename to node_modules/@sigstore/tuf/dist/client.js index 86a081de9f3af..08d6b61840909 100644 --- a/node_modules/sigstore/dist/tuf/index.js +++ b/node_modules/@sigstore/tuf/dist/client.js @@ -1,32 +1,9 @@ "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFClient = exports.getTrustedRoot = void 0; +exports.TUFClient = void 0; /* Copyright 2023 The Sigstore Authors. @@ -45,27 +22,12 @@ limitations under the License. 
const fs_1 = __importDefault(require("fs")); const path_1 = __importDefault(require("path")); const tuf_js_1 = require("tuf-js"); -const sigstore = __importStar(require("../types/sigstore")); -const util_1 = require("../util"); const target_1 = require("./target"); -const TRUSTED_ROOT_TARGET = 'trusted_root.json'; -const DEFAULT_CACHE_DIR = util_1.appdata.appDataPath('sigstore-js'); -const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; -const DEFAULT_TUF_ROOT_PATH = '../../store/public-good-instance-root.json'; -async function getTrustedRoot(options = {}) { - const client = new TUFClient(options); - const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); - return sigstore.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); -} -exports.getTrustedRoot = getTrustedRoot; class TUFClient { constructor(options) { - const cachePath = options.cachePath || DEFAULT_CACHE_DIR; - const tufRootPath = options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH); - const mirrorURL = options.mirrorURL || DEFAULT_MIRROR_URL; - initTufCache(cachePath, tufRootPath); - const remote = initRemoteConfig(cachePath, mirrorURL); - this.updater = initClient(cachePath, remote, options); + initTufCache(options.cachePath, options.rootPath); + const remote = initRemoteConfig(options.cachePath, options.mirrorURL); + this.updater = initClient(options.cachePath, remote, options); } async refresh() { return this.updater.refresh(); @@ -117,6 +79,7 @@ function initClient(cachePath, remote, options) { }; // tuf-js only supports a number for fetchRetries so we have to // convert the boolean and object options to a number. + /* istanbul ignore if */ if (typeof options.retry !== 'undefined') { if (typeof options.retry === 'number') { config.fetchRetries = options.retry; diff --git a/node_modules/@sigstore/tuf/dist/error.js b/node_modules/@sigstore/tuf/dist/error.js new file mode 100644 index 0000000000000..e13971b289ff2 --- /dev/null +++ b/node_modules/@sigstore/tuf/dist/error.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = void 0; +class TUFError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +exports.TUFError = TUFError; diff --git a/node_modules/@sigstore/tuf/dist/index.js b/node_modules/@sigstore/tuf/dist/index.js new file mode 100644 index 0000000000000..0d201c356dffc --- /dev/null +++ b/node_modules/@sigstore/tuf/dist/index.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = exports.initTUF = exports.getTrustedRoot = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const appdata_1 = require("./appdata"); +const client_1 = require("./client"); +const DEFAULT_CACHE_DIR = 'sigstore-js'; +const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; +const DEFAULT_TUF_ROOT_PATH = '../store/public-good-instance-root.json'; +const DEFAULT_RETRY = { retries: 2 }; +const DEFAULT_TIMEOUT = 5000; +const TRUSTED_ROOT_TARGET = 'trusted_root.json'; +async function getTrustedRoot( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); + return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); +} +exports.getTrustedRoot = getTrustedRoot; +async function initTUF( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + return client.refresh().then(() => client); +} +exports.initTUF = initTUF; +// Create a TUF client with default options +function createClient(options) { + /* istanbul ignore next */ + return new client_1.TUFClient({ + cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), + rootPath: options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH), + mirrorURL: options.mirrorURL || DEFAULT_MIRROR_URL, + retry: options.retry ?? DEFAULT_RETRY, + timeout: options.timeout ?? DEFAULT_TIMEOUT, + }); +} +var error_1 = require("./error"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/node_modules/sigstore/dist/tuf/target.js b/node_modules/@sigstore/tuf/dist/target.js similarity index 92% rename from node_modules/sigstore/dist/tuf/target.js rename to node_modules/@sigstore/tuf/dist/target.js index d7df61e5a4076..29eaf99a7e721 100644 --- a/node_modules/sigstore/dist/tuf/target.js +++ b/node_modules/@sigstore/tuf/dist/target.js @@ -20,14 +20,14 @@ See the License for the specific language governing permissions and limitations under the License. */ const fs_1 = __importDefault(require("fs")); -const error_1 = require("../error"); +const error_1 = require("./error"); // Downloads and returns the specified target from the provided TUF Updater. 
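// Usage sketch, reusing the target name from index.js above:
// readTarget(tuf, 'trusted_root.json') resolves with the file contents
// as a UTF-8 string, and rejects with a TUFError whose code identifies
// the failing phase (refresh, lookup, download, or read).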
async function readTarget(tuf, targetPath) { const path = await getTargetPath(tuf, targetPath); return new Promise((resolve, reject) => { fs_1.default.readFile(path, 'utf-8', (err, data) => { if (err) { - reject(new error_1.InternalError({ + reject(new error_1.TUFError({ code: 'TUF_READ_TARGET_ERROR', message: `error reading target ${path}`, cause: err, @@ -49,14 +49,14 @@ async function getTargetPath(tuf, target) { targetInfo = await tuf.getTargetInfo(target); } catch (err) { - throw new error_1.InternalError({ + throw new error_1.TUFError({ code: 'TUF_REFRESH_METADATA_ERROR', message: 'error refreshing TUF metadata', cause: err, }); } if (!targetInfo) { - throw new error_1.InternalError({ + throw new error_1.TUFError({ code: 'TUF_FIND_TARGET_ERROR', message: `target ${target} not found`, }); @@ -69,7 +69,7 @@ async function getTargetPath(tuf, target) { path = await tuf.downloadTarget(targetInfo); } catch (err) { - throw new error_1.InternalError({ + throw new error_1.TUFError({ code: 'TUF_DOWNLOAD_TARGET_ERROR', message: `error downloading target ${path}`, cause: err, diff --git a/node_modules/@sigstore/tuf/package.json b/node_modules/@sigstore/tuf/package.json new file mode 100644 index 0000000000000..241dc32b3c8a9 --- /dev/null +++ b/node_modules/@sigstore/tuf/package.json @@ -0,0 +1,45 @@ +{ + "name": "@sigstore/tuf", + "version": "1.0.0", + "description": "Client for the Sigstore TUF repository", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@total-typescript/shoehorn": "^0.1.0", + "@tufjs/repo-mock": "^1.1.0", + "@types/node": "^20.2.5", + "nock": "^13.2.4", + "shx": "^0.3.3", + "typescript": "^5.1.3" + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.1.0", + "tuf-js": "^1.1.3", + "make-fetch-happen": "^11.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/@sigstore/tuf/store/public-good-instance-root.json b/node_modules/@sigstore/tuf/store/public-good-instance-root.json new file mode 100644 index 0000000000000..e95c7e88cdf09 --- /dev/null +++ b/node_modules/@sigstore/tuf/store/public-good-instance-root.json @@ -0,0 +1 @@ +{"signed":{"_type":"root","spec_version":"1.0","version":7,"expires":"2023-10-04T13:08:11Z","keys":{"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"}},"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC 
KEY-----\n"}},"45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"}},"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"}},"e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"}},"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"}},"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC 
KEY-----\n"}}},"roles":{"root":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"snapshot":{"keyids":["45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b"],"threshold":1},"targets":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"timestamp":{"keyids":["e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a"],"threshold":1}},"consistent_snapshot":true},"signatures":[{"keyid":"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","sig":"3046022100c0610c0055ce5c4a52d054d7322e7b514d55baf44423d63aa4daa077cc60fd1f022100a097f2803f090fb66c42ead915a2c46ebe7db53a32bf18f2188275cc936f8bdd"},{"keyid":"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","sig":"304502203134f0468810299d5493a867c40630b341296b92e59c29821311d353343bb3a4022100e667ae3d304e7e3da0894c7425f6b9ecd917106841280e5cf6f3496ad5f8f68e"},{"keyid":"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","sig":"3045022037fe5f45426f21eaaf4730d2136f2b1611d6379688f79b9d1e3f61719997135c022100b63b022d7b79d4694b96f416d88aa4d7b1a3bff8a01f4fb51e0f42137c7d2d06"},{"keyid":"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de","sig":"3044022007cc8fcc4940809f2751ad5b535f4c5f53f5b4952f5b5696b09668e743306ac1022006dfcdf94e94c92163eeb1b47796db62cedaa730aa13aa61b573fe23714730f2"}]} diff --git a/node_modules/sigstore/dist/external/rekor.js b/node_modules/sigstore/dist/external/rekor.js index 80650ce02ff9b..b6bbeb6f20793 100644 --- a/node_modules/sigstore/dist/external/rekor.js +++ b/node_modules/sigstore/dist/external/rekor.js @@ -39,7 +39,7 @@ class Rekor { } /** * Create a new entry in the Rekor log. - * @param propsedEntry {EntryKind} Data to create a new entry + * @param propsedEntry {ProposedEntry} Data to create a new entry * @returns {Promise} The created entry */ async createEntry(propsedEntry) { @@ -107,7 +107,7 @@ function entryFromResponse(data) { throw new Error('Received multiple entries in Rekor response'); } // Grab UUID and entry data from the response - const [uuid, entry] = Object.entries(data)[0]; + const [uuid, entry] = entries[0]; return { ...entry, uuid, diff --git a/node_modules/sigstore/dist/index.js b/node_modules/sigstore/dist/index.js index 502155e4d5f3f..126fce58e45bd 100644 --- a/node_modules/sigstore/dist/index.js +++ b/node_modules/sigstore/dist/index.js @@ -24,19 +24,4 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.sigstore = void 0; -/* -Copyright 2022 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ exports.sigstore = __importStar(require("./sigstore")); diff --git a/node_modules/sigstore/dist/sigstore.js b/node_modules/sigstore/dist/sigstore.js index 8d245e17b2a0c..a14c5957954d8 100644 --- a/node_modules/sigstore/dist/sigstore.js +++ b/node_modules/sigstore/dist/sigstore.js @@ -39,9 +39,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ +const tuf = __importStar(require("@sigstore/tuf")); const config = __importStar(require("./config")); const sign_1 = require("./sign"); -const tuf = __importStar(require("./tuf")); const sigstore = __importStar(require("./types/sigstore")); const verify_1 = require("./verify"); async function sign(payload, options = {}) { @@ -51,7 +51,9 @@ async function sign(payload, options = {}) { const signer = new sign_1.Signer({ ca, tlog, - identityProviders: idps, + identityProviders: options.identityProvider + ? [options.identityProvider] + : idps, tlogUpload: options.tlogUpload, }); const bundle = await signer.signBlob(payload); @@ -67,7 +69,9 @@ async function attest(payload, payloadType, options = {}) { ca, tlog, tsa, - identityProviders: idps, + identityProviders: options.identityProvider + ? [options.identityProvider] + : idps, tlogUpload: options.tlogUpload, }); const bundle = await signer.signAttestation(payload, payloadType); @@ -90,20 +94,27 @@ async function verify(bundle, payload, options = {}) { exports.verify = verify; const tufUtils = { client: (options = {}) => { - const t = new tuf.TUFClient({ + return tuf.initTUF({ mirrorURL: options.tufMirrorURL, rootPath: options.tufRootPath, cachePath: options.tufCachePath, - retry: options.retry ?? config.DEFAULT_RETRY, - timeout: options.timeout ?? config.DEFAULT_TIMEOUT, + retry: options.retry, + timeout: options.timeout, }); - return t.refresh().then(() => t); }, /* * @deprecated Use tufUtils.client instead. 
*/ getTarget: (path, options = {}) => { - return tufUtils.client(options).then((t) => t.getTarget(path)); + return tuf + .initTUF({ + mirrorURL: options.tufMirrorURL, + rootPath: options.tufRootPath, + cachePath: options.tufCachePath, + retry: options.retry, + timeout: options.timeout, + }) + .then((t) => t.getTarget(path)); }, }; exports.tuf = tufUtils; diff --git a/node_modules/sigstore/dist/tlog/format.js b/node_modules/sigstore/dist/tlog/format.js index 67077090455a1..b0eae95098af0 100644 --- a/node_modules/sigstore/dist/tlog/format.js +++ b/node_modules/sigstore/dist/tlog/format.js @@ -1,10 +1,22 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = void 0; +exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = exports.toProposedDSSEEntry = void 0; +const sigstore_1 = require("../types/sigstore"); const util_1 = require("../util"); -const types_1 = require("./types"); +const DEFAULT_DSSE_API_VERSION = '0.0.1'; const DEFAULT_HASHEDREKORD_API_VERSION = '0.0.1'; const DEFAULT_INTOTO_API_VERSION = '0.0.2'; +// Returns a properly formatted Rekor "dsse" entry for the given DSSE +// envelope and signature +function toProposedDSSEEntry(envelope, signature, apiVersion = DEFAULT_DSSE_API_VERSION) { + switch (apiVersion) { + case '0.0.1': + return toProposedDSSEV001Entry(envelope, signature); + default: + throw new Error(`Unsupported dsse kind API version: ${apiVersion}`); + } +} +exports.toProposedDSSEEntry = toProposedDSSEEntry; // Returns a properly formatted Rekor "hashedrekord" entry for the given digest // and signature function toProposedHashedRekordEntry(digest, signature) { @@ -13,7 +25,7 @@ function toProposedHashedRekordEntry(digest, signature) { const b64Key = util_1.encoding.base64Encode(toPublicKey(signature)); return { apiVersion: DEFAULT_HASHEDREKORD_API_VERSION, - kind: types_1.HASHEDREKORD_KIND, + kind: 'hashedrekord', spec: { data: { hash: { @@ -42,11 +54,23 @@ function toProposedIntotoEntry(envelope, signature, apiVersion = DEFAULT_INTOTO_ } } exports.toProposedIntotoEntry = toProposedIntotoEntry; +function toProposedDSSEV001Entry(envelope, signature) { + return { + apiVersion: '0.0.1', + kind: 'dsse', + spec: { + proposedContent: { + envelope: JSON.stringify(sigstore_1.Envelope.toJSON(envelope)), + verifiers: [util_1.encoding.base64Encode(toPublicKey(signature))], + }, + }, + }; +} function toProposedIntotoV002Entry(envelope, signature) { // Calculate the value for the payloadHash field in the Rekor entry const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex'); // Calculate the value for the hash field in the Rekor entry - const envelopeHash = calculateDSSEHash(envelope); + const envelopeHash = calculateDSSEHash(envelope, signature); // Collect values for re-creating the DSSE envelope. // Double-encode payload and signature cause that's what Rekor expects const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64')); @@ -56,7 +80,7 @@ function toProposedIntotoV002Entry(envelope, signature) { // Create the envelope portion of the entry. Note the inclusion of the // publicKey in the signature struct is not a standard part of a DSSE // envelope, but is required by Rekor. 
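// A small, self-contained sketch of the "double-encode" step noted above,
// using only Node's Buffer API; rawPayload is a made-up stand-in for
// envelope.payload, not a value from the patch.
const rawPayload = Buffer.from('{"_type":"https://in-toto.io/Statement/v0.1"}');
const singleB64 = rawPayload.toString('base64');             // first base64 pass
const doubleB64 = Buffer.from(singleB64).toString('base64'); // second pass, as Rekor expects
// Decoding twice round-trips to the original bytes:
const roundTrip = Buffer.from(Buffer.from(doubleB64, 'base64').toString(), 'base64');
console.log(roundTrip.equals(rawPayload)); // true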
- const dsse = { + const dsseEnv = { payloadType: envelope.payloadType, payload: payload, signatures: [{ sig, publicKey }], @@ -65,14 +89,14 @@ function toProposedIntotoV002Entry(envelope, signature) { // need to do the same here so that we can properly recreate the entry for // verification. if (keyid.length > 0) { - dsse.signatures[0].keyid = keyid; + dsseEnv.signatures[0].keyid = keyid; } return { apiVersion: '0.0.2', - kind: types_1.INTOTO_KIND, + kind: 'intoto', spec: { content: { - envelope: dsse, + envelope: dsseEnv, hash: { algorithm: 'sha256', value: envelopeHash }, payloadHash: { algorithm: 'sha256', value: payloadHash }, }, @@ -86,17 +110,22 @@ function toProposedIntotoV002Entry(envelope, signature) { // * signature is base64 encoded (only the first signature is used) // * keyid is included ONLY if it is NOT an empty string // * The resulting JSON is canonicalized and hashed to a hex string -function calculateDSSEHash(envelope) { - const dsse = { +function calculateDSSEHash(envelope, signature) { + const dsseEnv = { payloadType: envelope.payloadType, payload: envelope.payload.toString('base64'), - signatures: [{ sig: envelope.signatures[0].sig.toString('base64') }], + signatures: [ + { + sig: envelope.signatures[0].sig.toString('base64'), + publicKey: toPublicKey(signature), + }, + ], }; // If the keyid is an empty string, Rekor seems to remove it altogether. if (envelope.signatures[0].keyid.length > 0) { - dsse.signatures[0].keyid = envelope.signatures[0].keyid; + dsseEnv.signatures[0].keyid = envelope.signatures[0].keyid; } - return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex'); + return util_1.crypto.hash(util_1.json.canonicalize(dsseEnv)).toString('hex'); } function toPublicKey(signature) { return signature.certificates diff --git a/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js b/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js deleted file mode 100644 index 61923a61cd8de..0000000000000 --- a/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js +++ /dev/null @@ -1,8 +0,0 @@ -"use strict"; -/* eslint-disable */ -/** - * This file was automatically generated by json-schema-to-typescript. - * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, - * and run json-schema-to-typescript to regenerate this file. - */ -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js b/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js deleted file mode 100644 index 61923a61cd8de..0000000000000 --- a/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js +++ /dev/null @@ -1,8 +0,0 @@ -"use strict"; -/* eslint-disable */ -/** - * This file was automatically generated by json-schema-to-typescript. - * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, - * and run json-schema-to-typescript to regenerate this file. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/sigstore/dist/tlog/types/index.js b/node_modules/sigstore/dist/tlog/types/index.js deleted file mode 100644 index d6394a95c8397..0000000000000 --- a/node_modules/sigstore/dist/tlog/types/index.js +++ /dev/null @@ -1,5 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.HASHEDREKORD_KIND = exports.INTOTO_KIND = void 0; -exports.INTOTO_KIND = 'intoto'; -exports.HASHEDREKORD_KIND = 'hashedrekord'; diff --git a/node_modules/sigstore/dist/tlog/verify/body.js b/node_modules/sigstore/dist/tlog/verify/body.js index 086e068a30dcb..5a265e5190c12 100644 --- a/node_modules/sigstore/dist/tlog/verify/body.js +++ b/node_modules/sigstore/dist/tlog/verify/body.js @@ -28,6 +28,9 @@ function verifyTLogBody(entry, bundleContent) { throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); } switch (body.kind) { + case 'dsse': + verifyDSSETLogBody(body, bundleContent); + break; case 'intoto': verifyIntotoTLogBody(body, bundleContent); break; @@ -45,6 +48,20 @@ function verifyTLogBody(entry, bundleContent) { } exports.verifyTLogBody = verifyTLogBody; // Compare the given intoto tlog entry to the given bundle +function verifyDSSETLogBody(tlogEntry, content) { + if (content?.$case !== 'dsseEnvelope') { + throw new error_1.VerificationError(`unsupported bundle content: ${content?.$case || 'unknown'}`); + } + const dsse = content.dsseEnvelope; + switch (tlogEntry.apiVersion) { + case '0.0.1': + verifyDSSE001TLogBody(tlogEntry, dsse); + break; + default: + throw new error_1.VerificationError(`unsupported dsse version: ${tlogEntry.apiVersion}`); + } +} +// Compare the given intoto tlog entry to the given bundle function verifyIntotoTLogBody(tlogEntry, content) { if (content?.$case !== 'dsseEnvelope') { throw new error_1.VerificationError(`unsupported bundle content: ${content?.$case || 'unknown'}`); @@ -72,6 +89,28 @@ function verifyHashedRekordTLogBody(tlogEntry, content) { throw new error_1.VerificationError(`unsupported hashedrekord version: ${tlogEntry.apiVersion}`); } } +// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope. +function verifyDSSE001TLogBody(tlogEntry, dsse) { + // Collect all of the signatures from the DSSE envelope + // Turns them into base64-encoded strings for comparison + const dsseSigs = dsse.signatures.map((signature) => signature.sig.toString('base64')); + // Collect all of the signatures from the tlog entry + const tlogSigs = tlogEntry.spec.signatures?.map((signature) => signature.signature); + // Ensure the bundle's DSSE and the tlog entry contain the same number of signatures + if (dsseSigs.length !== tlogSigs?.length) { + throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); + } + // Ensure that every signature in the bundle's DSSE is present in the tlog entry + if (!dsseSigs.every((dsseSig) => tlogSigs.includes(dsseSig))) { + throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); + } + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const dssePayloadHash = util_1.crypto.hash(dsse.payload).toString('hex'); + if (dssePayloadHash !== tlogEntry.spec.payloadHash?.value) { + throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); + } +} // Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope. 
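// A hedged, standalone sketch of the signature-set check performed by
// verifyDSSE001TLogBody above: the counts must match and every base64
// signature from the bundle's DSSE must appear in the tlog entry. The
// inputs here are plain string arrays with made-up values.
function sameSignatureSet(dsseSigs, tlogSigs) {
  if (!tlogSigs || dsseSigs.length !== tlogSigs.length) {
    return false;
  }
  return dsseSigs.every((sig) => tlogSigs.includes(sig));
}
console.log(sameSignatureSet(['c2ln'], ['c2ln']));     // true
console.log(sameSignatureSet(['c2ln'], ['b3RoZXI='])); // false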
function verifyIntoto002TLogBody(tlogEntry, dsse) { // Collect all of the signatures from the DSSE envelope diff --git a/node_modules/sigstore/dist/types/sigstore/index.js b/node_modules/sigstore/dist/types/sigstore/index.js index 544db63b002bf..4d9f6003744da 100644 --- a/node_modules/sigstore/dist/types/sigstore/index.js +++ b/node_modules/sigstore/dist/types/sigstore/index.js @@ -103,7 +103,8 @@ function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }) } exports.toMessageSignatureBundle = toMessageSignatureBundle; function toTransparencyLogEntry(entry) { - const set = Buffer.from(entry.verification.signedEntryTimestamp, 'base64'); + const b64SET = entry.verification?.signedEntryTimestamp || ''; + const set = Buffer.from(b64SET, 'base64'); const logID = Buffer.from(entry.logID, 'hex'); // Parse entry body so we can extract the kind and version. const bodyJSON = util_1.encoding.base64Decode(entry.body); diff --git a/node_modules/sigstore/dist/x509/asn1/dump.js b/node_modules/sigstore/dist/util/asn1/dump.js similarity index 100% rename from node_modules/sigstore/dist/x509/asn1/dump.js rename to node_modules/sigstore/dist/util/asn1/dump.js diff --git a/node_modules/sigstore/dist/x509/asn1/error.js b/node_modules/sigstore/dist/util/asn1/error.js similarity index 100% rename from node_modules/sigstore/dist/x509/asn1/error.js rename to node_modules/sigstore/dist/util/asn1/error.js diff --git a/node_modules/sigstore/dist/util/asn1/index.js b/node_modules/sigstore/dist/util/asn1/index.js new file mode 100644 index 0000000000000..348b2ea4022e5 --- /dev/null +++ b/node_modules/sigstore/dist/util/asn1/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var obj_1 = require("./obj"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } }); diff --git a/node_modules/sigstore/dist/x509/asn1/length.js b/node_modules/sigstore/dist/util/asn1/length.js similarity index 100% rename from node_modules/sigstore/dist/x509/asn1/length.js rename to node_modules/sigstore/dist/util/asn1/length.js diff --git a/node_modules/sigstore/dist/x509/asn1/obj.js b/node_modules/sigstore/dist/util/asn1/obj.js similarity index 95% rename from node_modules/sigstore/dist/x509/asn1/obj.js rename to node_modules/sigstore/dist/util/asn1/obj.js index 712acf105adfc..5f9ac9cdbc493 100644 --- a/node_modules/sigstore/dist/x509/asn1/obj.js +++ b/node_modules/sigstore/dist/util/asn1/obj.js @@ -16,7 +16,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -const stream_1 = require("../../util/stream"); +const stream_1 = require("../stream"); const error_1 = require("./error"); const length_1 = require("./length"); const parse_1 = require("./parse"); @@ -132,7 +132,10 @@ function parseStream(stream) { function collectSubs(stream, len) { // Calculate end of object content const end = stream.position + len; - // Make sure there are enough bytes left in the stream + // Make sure there are enough bytes left in the stream. This should never + // happen, cause it'll get caught when the stream is sliced in parseStream. + // Leaving as an extra check just in case. + /* istanbul ignore if */ if (end > stream.length) { throw new error_1.ASN1ParseError('invalid length'); } diff --git a/node_modules/sigstore/dist/x509/asn1/parse.js b/node_modules/sigstore/dist/util/asn1/parse.js similarity index 100% rename from node_modules/sigstore/dist/x509/asn1/parse.js rename to node_modules/sigstore/dist/util/asn1/parse.js diff --git a/node_modules/sigstore/dist/x509/asn1/tag.js b/node_modules/sigstore/dist/util/asn1/tag.js similarity index 100% rename from node_modules/sigstore/dist/x509/asn1/tag.js rename to node_modules/sigstore/dist/util/asn1/tag.js diff --git a/node_modules/sigstore/dist/util/index.js b/node_modules/sigstore/dist/util/index.js index 74ef9c0b1121b..b7d6ce21aafd3 100644 --- a/node_modules/sigstore/dist/util/index.js +++ b/node_modules/sigstore/dist/util/index.js @@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.appdata = void 0; +exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0; /* Copyright 2022 The Sigstore Authors. @@ -39,7 +39,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -exports.appdata = __importStar(require("./appdata")); +exports.asn1 = __importStar(require("./asn1")); exports.crypto = __importStar(require("./crypto")); exports.dsse = __importStar(require("./dsse")); exports.encoding = __importStar(require("./encoding")); diff --git a/node_modules/sigstore/dist/util/stream.js b/node_modules/sigstore/dist/util/stream.js index d5c8236123cdf..b5c881bb388d4 100644 --- a/node_modules/sigstore/dist/util/stream.js +++ b/node_modules/sigstore/dist/util/stream.js @@ -112,5 +112,5 @@ class ByteStream { this.view = newView; } } -ByteStream.BLOCK_SIZE = 1024; exports.ByteStream = ByteStream; +ByteStream.BLOCK_SIZE = 1024; diff --git a/node_modules/sigstore/dist/x509/cert.js b/node_modules/sigstore/dist/x509/cert.js index 0b8ab54740a06..ec14b5f47369d 100644 --- a/node_modules/sigstore/dist/x509/cert.js +++ b/node_modules/sigstore/dist/x509/cert.js @@ -2,8 +2,8 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.x509Certificate = void 0; const util_1 = require("../util"); +const asn1_1 = require("../util/asn1"); const stream_1 = require("../util/stream"); -const obj_1 = require("./asn1/obj"); const ext_1 = require("./ext"); const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14'; const EXTENSION_OID_KEY_USAGE = '2.5.29.15'; @@ -33,7 +33,7 @@ class x509Certificate { } static parse(cert) { const der = typeof cert === 'string' ? 
util_1.pem.toDER(cert) : cert; - const asn1 = obj_1.ASN1Obj.parseBuffer(der); + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); return new x509Certificate(asn1); } get tbsCertificate() { diff --git a/node_modules/sigstore/dist/x509/ext.js b/node_modules/sigstore/dist/x509/ext.js index c1743dce5556d..246aeb095802f 100644 --- a/node_modules/sigstore/dist/x509/ext.js +++ b/node_modules/sigstore/dist/x509/ext.js @@ -1,21 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.x509SCTExtension = exports.x509SubjectKeyIDExtension = exports.x509AuthorityKeyIDExtension = exports.x509SubjectAlternativeNameExtension = exports.x509KeyUsageExtension = exports.x509BasicConstraintsExtension = exports.x509Extension = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ const stream_1 = require("../util/stream"); const sct_1 = require("./sct"); // https://www.rfc-editor.org/rfc/rfc5280#section-4.1 diff --git a/node_modules/sigstore/package.json b/node_modules/sigstore/package.json index 2ca34e2a445ad..b7dc6e30f0dcd 100644 --- a/node_modules/sigstore/package.json +++ b/node_modules/sigstore/package.json @@ -1,10 +1,11 @@ { "name": "sigstore", - "version": "1.5.2", + "version": "1.6.0", "description": "code-signing for npm packages", "main": "dist/index.js", "types": "dist/index.d.ts", "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", "build": "tsc --build", "test": "jest" }, @@ -29,16 +30,20 @@ "provenance": true }, "devDependencies": { + "@sigstore/rekor-types": "^1.0.0", "@total-typescript/shoehorn": "^0.1.0", "@tufjs/repo-mock": "^1.1.0", "@types/make-fetch-happen": "^10.0.0", - "@types/node": "^20.0.0", + "@types/sigstore-jest-extended": "^0.0.0", + "@types/node": "^20.2.5", "json-schema-to-typescript": "^13.0.0", "nock": "^13.2.4", - "typescript": "^5.0.2" + "shx": "^0.3.3", + "typescript": "^5.1.3" }, "dependencies": { "@sigstore/protobuf-specs": "^0.1.0", + "@sigstore/tuf": "^1.0.0", "make-fetch-happen": "^11.0.1", "tuf-js": "^1.1.3" }, diff --git a/package-lock.json b/package-lock.json index 94a29d5db33f8..47b54422e6ff1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -140,7 +140,7 @@ "qrcode-terminal": "^0.12.0", "read": "^2.1.0", "semver": "^7.5.2", - "sigstore": "^1.5.0", + "sigstore": "^1.6.0", "ssri": "^10.0.4", "supports-color": "^9.3.1", "tar": "^6.1.14", @@ -2704,6 +2704,20 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@sigstore/tuf": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.0.tgz", + "integrity": "sha512-bLzi9GeZgMCvjJeLUIfs8LJYCxrPRA8IXQkzUtaFKKVPTz0mucRyqFcV2U20yg9K+kYAD0YSitzGfRZCFLjdHQ==", + "inBundle": true, + "dependencies": { + "@sigstore/protobuf-specs": "^0.1.0", + "make-fetch-happen": "^11.0.1", + "tuf-js": "^1.1.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@tootallnate/once": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", @@ -11447,12 
+11461,13 @@ } }, "node_modules/sigstore": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.5.2.tgz", - "integrity": "sha512-X95v6xAAooVpn7PaB94TDmFeSO5SBfCtB1R23fvzr36WTfjtkiiyOeei979nbTjc8nzh6FSLeltQZuODsm1EjQ==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.6.0.tgz", + "integrity": "sha512-QODKff/qW/TXOZI6V/Clqu74xnInAS6it05mufj4/fSewexLtfEntgLZZcBtUK44CDQyUE5TUXYy1ARYzlfG9g==", "inBundle": true, "dependencies": { "@sigstore/protobuf-specs": "^0.1.0", + "@sigstore/tuf": "^1.0.0", "make-fetch-happen": "^11.0.1", "tuf-js": "^1.1.3" }, diff --git a/package.json b/package.json index fd2ef9c34e7c4..66698231fbd25 100644 --- a/package.json +++ b/package.json @@ -107,7 +107,7 @@ "qrcode-terminal": "^0.12.0", "read": "^2.1.0", "semver": "^7.5.2", - "sigstore": "^1.5.0", + "sigstore": "^1.6.0", "ssri": "^10.0.4", "supports-color": "^9.3.1", "tar": "^6.1.14", From 78d0088e59a0e6e8a1b90a6f306095f2656a986f Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:47:06 -0700 Subject: [PATCH 09/16] chore: tar@6.1.15 --- node_modules/tar/lib/normalize-unicode.js | 2 +- node_modules/tar/lib/path-reservations.js | 2 +- node_modules/tar/lib/unpack.js | 2 +- node_modules/tar/package.json | 2 +- package-lock.json | 8 ++++---- package.json | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/node_modules/tar/lib/normalize-unicode.js b/node_modules/tar/lib/normalize-unicode.js index 43dc406ecedb9..79e285ab30d57 100644 --- a/node_modules/tar/lib/normalize-unicode.js +++ b/node_modules/tar/lib/normalize-unicode.js @@ -6,7 +6,7 @@ const normalizeCache = Object.create(null) const { hasOwnProperty } = Object.prototype module.exports = s => { if (!hasOwnProperty.call(normalizeCache, s)) { - normalizeCache[s] = s.normalize('NFKD') + normalizeCache[s] = s.normalize('NFD') } return normalizeCache[s] } diff --git a/node_modules/tar/lib/path-reservations.js b/node_modules/tar/lib/path-reservations.js index ef380cab685e3..8d349d584513f 100644 --- a/node_modules/tar/lib/path-reservations.js +++ b/node_modules/tar/lib/path-reservations.js @@ -123,7 +123,7 @@ module.exports = () => { // effectively removing all parallelization on windows. paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => { // don't need normPath, because we skip this entirely for windows - return normalize(stripSlashes(join(p))).toLowerCase() + return stripSlashes(join(normalize(p))).toLowerCase() }) const dirs = new Set( diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js index e341ad0c7239e..fa46611c0a19f 100644 --- a/node_modules/tar/lib/unpack.js +++ b/node_modules/tar/lib/unpack.js @@ -105,7 +105,7 @@ const uint32 = (a, b, c) => // Note that on windows, we always drop the entire cache whenever a // symbolic link is encountered, because 8.3 filenames are impossible // to reason about, and collisions are hazards rather than just failures. 
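// Sketch of the memoized NFD normalization that normalize-unicode.js (above,
// same patch) now performs; the composed and decomposed spellings of "café"
// are illustrative inputs, nothing more.
const normalizeCache = Object.create(null);
const toNFD = (s) => {
  if (!Object.prototype.hasOwnProperty.call(normalizeCache, s)) {
    normalizeCache[s] = s.normalize('NFD'); // NFD, not NFKD, as of tar 6.1.15
  }
  return normalizeCache[s];
};
console.log(toNFD('caf\u00e9') === toNFD('cafe\u0301')); // true -- both map to the NFD form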
-const cacheKeyNormalize = path => normalize(stripSlash(normPath(path))) +const cacheKeyNormalize = path => stripSlash(normPath(normalize(path))) .toLowerCase() const pruneCache = (cache, abs) => { diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json index 943c8b4d06b94..f59f54ae837bd 100644 --- a/node_modules/tar/package.json +++ b/node_modules/tar/package.json @@ -2,7 +2,7 @@ "author": "GitHub Inc.", "name": "tar", "description": "tar for node", - "version": "6.1.14", + "version": "6.1.15", "repository": { "type": "git", "url": "https://github.com/isaacs/node-tar.git" diff --git a/package-lock.json b/package-lock.json index 47b54422e6ff1..ab7cbf5ab31c7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -143,7 +143,7 @@ "sigstore": "^1.6.0", "ssri": "^10.0.4", "supports-color": "^9.3.1", - "tar": "^6.1.14", + "tar": "^6.1.15", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^3.0.0", @@ -14176,9 +14176,9 @@ } }, "node_modules/tar": { - "version": "6.1.14", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.14.tgz", - "integrity": "sha512-piERznXu0U7/pW7cdSn7hjqySIVTYT6F76icmFk7ptU7dDYlXTm5r9A6K04R2vU3olYgoKeo1Cg3eeu5nhftAw==", + "version": "6.1.15", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.15.tgz", + "integrity": "sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==", "inBundle": true, "dependencies": { "chownr": "^2.0.0", diff --git a/package.json b/package.json index 66698231fbd25..d9a86c039de50 100644 --- a/package.json +++ b/package.json @@ -110,7 +110,7 @@ "sigstore": "^1.6.0", "ssri": "^10.0.4", "supports-color": "^9.3.1", - "tar": "^6.1.14", + "tar": "^6.1.15", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^3.0.0", From a20097ab2c4ffd5f0894bb6523ef0cea1b4d90ba Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:48:22 -0700 Subject: [PATCH 10/16] deps: is-core-module@2.12.1 --- node_modules/is-core-module/core.json | 2 +- node_modules/is-core-module/package.json | 2 +- package-lock.json | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/node_modules/is-core-module/core.json b/node_modules/is-core-module/core.json index 9a51663fc5ec7..af29f0b734b22 100644 --- a/node_modules/is-core-module/core.json +++ b/node_modules/is-core-module/core.json @@ -114,7 +114,7 @@ "node:string_decoder": [">= 14.18 && < 15", ">= 16"], "sys": [">= 0.4 && < 0.7", ">= 0.8"], "node:sys": [">= 14.18 && < 15", ">= 16"], - "test/reporters": [">= 19.9", ">= 20"], + "test/reporters": ">= 19.9 && < 20.2", "node:test/reporters": [">= 19.9", ">= 20"], "node:test": [">= 16.17 && < 17", ">= 18"], "timers": true, diff --git a/node_modules/is-core-module/package.json b/node_modules/is-core-module/package.json index 715299bd8d5c3..62bb065e8364a 100644 --- a/node_modules/is-core-module/package.json +++ b/node_modules/is-core-module/package.json @@ -1,6 +1,6 @@ { "name": "is-core-module", - "version": "2.12.0", + "version": "2.12.1", "description": "Is this specifier a node.js core module?", "main": "index.js", "sideEffects": false, diff --git a/package-lock.json b/package-lock.json index ab7cbf5ab31c7..77c9c2eb2aa67 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6731,9 +6731,9 @@ } }, "node_modules/is-core-module": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.0.tgz", - "integrity": "sha512-RECHCBCd/viahWmwj6enj19sKbHfJrddi/6cBDsNTKbNq0f7VeaUkBo60BqzvPqo/W54ChS62Z5qyun7cfOMqQ==", + 
"version": "2.12.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.1.tgz", + "integrity": "sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg==", "inBundle": true, "dependencies": { "has": "^1.0.3" From d836099aabb05e5b061c95361c1a814f28073736 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:52:57 -0700 Subject: [PATCH 11/16] deps: jackspeak@2.2.1 --- node_modules/jackspeak/package.json | 2 +- package-lock.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/node_modules/jackspeak/package.json b/node_modules/jackspeak/package.json index 8d85e7f09e915..afaa43e20ae4a 100644 --- a/node_modules/jackspeak/package.json +++ b/node_modules/jackspeak/package.json @@ -1,6 +1,6 @@ { "name": "jackspeak", - "version": "2.2.0", + "version": "2.2.1", "description": "A very strict and proper argument parser.", "main": "./dist/cjs/index.js", "module": "./dist/mjs/index.js", diff --git a/package-lock.json b/package-lock.json index 77c9c2eb2aa67..78aa57332c843 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7149,9 +7149,9 @@ } }, "node_modules/jackspeak": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.2.0.tgz", - "integrity": "sha512-r5XBrqIJfwRIjRt/Xr5fv9Wh09qyhHfKnYddDlpM+ibRR20qrYActpCAgU6U+d53EOEjzkvxPMVHSlgR7leXrQ==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.2.1.tgz", + "integrity": "sha512-MXbxovZ/Pm42f6cDIDkl3xpwv1AGwObKwfmjs2nQePiy85tP3fatofl3FC1aBsOtP/6fq5SbtgHwWcMsLP+bDw==", "inBundle": true, "dependencies": { "@isaacs/cliui": "^8.0.2" From 83bbe7e7699dd8b8d062dece8cfdcebf4efdb985 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:54:45 -0700 Subject: [PATCH 12/16] deps: read-package-json@6.0.4 --- node_modules/read-package-json/lib/read-json.js | 2 +- node_modules/read-package-json/package.json | 14 +++++++------- package-lock.json | 6 +++--- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/node_modules/read-package-json/lib/read-json.js b/node_modules/read-package-json/lib/read-json.js index aaf24e94a7375..d35f09ebd208f 100644 --- a/node_modules/read-package-json/lib/read-json.js +++ b/node_modules/read-package-json/lib/read-json.js @@ -352,7 +352,7 @@ function bins (file, data, cb) { return cb(null, data) } - m = path.resolve(path.dirname(file), m) + m = path.resolve(path.dirname(file), path.join('.', path.join('/', m))) glob('**', { cwd: m }) .then(binsGlob => bins_(file, data, binsGlob, cb)) .catch(er => cb(er)) diff --git a/node_modules/read-package-json/package.json b/node_modules/read-package-json/package.json index a2f93088f37fc..90ab321d51743 100644 --- a/node_modules/read-package-json/package.json +++ b/node_modules/read-package-json/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json", - "version": "6.0.3", + "version": "6.0.4", "author": "GitHub Inc.", "description": "The thing npm uses to read package.json files with semantics and defaults and validation", "repository": { @@ -30,7 +30,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.1" }, "license": "ISC", @@ -42,10 +42,10 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "tap": { - "branches": 68, - "functions": 74, - "lines": 74, - "statements": 74, + "branches": 73, + "functions": 77, + "lines": 77, + "statements": 77, "nyc-arg": [ "--exclude", "tap-snapshots/**" @@ -53,7 +53,7 @@ }, 
"templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "version": "4.15.1", "publish": "true" } } diff --git a/package-lock.json b/package-lock.json index 78aa57332c843..acb05ba5fd957 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10614,9 +10614,9 @@ } }, "node_modules/read-package-json": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-6.0.3.tgz", - "integrity": "sha512-4QbpReW4kxFgeBQ0vPAqh2y8sXEB3D4t3jsXbJKIhBiF80KT6XRo45reqwtftju5J6ru1ax06A2Gb/wM1qCOEQ==", + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-6.0.4.tgz", + "integrity": "sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==", "inBundle": true, "dependencies": { "glob": "^10.2.2", From 10b474d219840330848cbf1f64b4ce2ea8e8ab9b Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:55:12 -0700 Subject: [PATCH 13/16] deps: path-scurry@1.9.2 --- node_modules/path-scurry/package.json | 4 ++-- package-lock.json | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/node_modules/path-scurry/package.json b/node_modules/path-scurry/package.json index 677bf1ce9b6e5..5b900825e44e0 100644 --- a/node_modules/path-scurry/package.json +++ b/node_modules/path-scurry/package.json @@ -1,6 +1,6 @@ { "name": "path-scurry", - "version": "1.9.1", + "version": "1.9.2", "description": "walk paths fast and efficiently", "author": "Isaac Z. Schlueter (https://blog.izs.me)", "main": "./dist/cjs/index.js", @@ -82,6 +82,6 @@ }, "dependencies": { "lru-cache": "^9.1.1", - "minipass": "^5.0.0 || ^6.0.0" + "minipass": "^5.0.0 || ^6.0.2" } } diff --git a/package-lock.json b/package-lock.json index acb05ba5fd957..dee41b223fd55 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10263,13 +10263,13 @@ "dev": true }, "node_modules/path-scurry": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.9.1.tgz", - "integrity": "sha512-UgmoiySyjFxP6tscZDgWGEAgsW5ok8W3F5CJDnnH2pozwSTGE6eH7vwTotMwATWA2r5xqdkKdxYPkwlJjAI/3g==", + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.9.2.tgz", + "integrity": "sha512-qSDLy2aGFPm8i4rsbHd4MNyTcrzHFsLQykrtbuGRknZZCBBVXSv2tSCDN2Cg6Rt/GFRw8GoW9y9Ecw5rIPG1sg==", "inBundle": true, "dependencies": { "lru-cache": "^9.1.1", - "minipass": "^5.0.0 || ^6.0.0" + "minipass": "^5.0.0 || ^6.0.2" }, "engines": { "node": ">=16 || 14 >=14.17" From cc09c1f0b9f030e311e41b59c1ab54008fe610ff Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 07:56:38 -0700 Subject: [PATCH 14/16] deps: strip-ansi@7.1.0 --- .../@isaacs/cliui/node_modules/strip-ansi/index.js | 7 ++++++- .../cliui/node_modules/strip-ansi/package.json | 2 +- .../wrap-ansi/node_modules/strip-ansi/index.js | 7 ++++++- .../wrap-ansi/node_modules/strip-ansi/package.json | 2 +- package-lock.json | 12 ++++++------ 5 files changed, 20 insertions(+), 10 deletions(-) diff --git a/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js b/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js index ef3c095f5fd42..ba19750e64e06 100644 --- a/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js +++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js @@ -1,9 +1,14 @@ import ansiRegex from 'ansi-regex'; +const regex = ansiRegex(); + export default function stripAnsi(string) { if (typeof string !== 'string') 
{ throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``); } - return string.replace(ansiRegex(), ''); + // Even though the regex is global, we don't need to reset the `.lastIndex` + // because unlike `.exec()` and `.test()`, `.replace()` does it automatically + // and doing it manually has a performance penalty. + return string.replace(regex, ''); } diff --git a/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json index 0de0586f7eef1..e1f455c325b00 100644 --- a/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json +++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json @@ -1,6 +1,6 @@ { "name": "strip-ansi", - "version": "7.0.1", + "version": "7.1.0", "description": "Strip ANSI escape codes from a string", "license": "MIT", "repository": "chalk/strip-ansi", diff --git a/node_modules/wrap-ansi/node_modules/strip-ansi/index.js b/node_modules/wrap-ansi/node_modules/strip-ansi/index.js index ef3c095f5fd42..ba19750e64e06 100644 --- a/node_modules/wrap-ansi/node_modules/strip-ansi/index.js +++ b/node_modules/wrap-ansi/node_modules/strip-ansi/index.js @@ -1,9 +1,14 @@ import ansiRegex from 'ansi-regex'; +const regex = ansiRegex(); + export default function stripAnsi(string) { if (typeof string !== 'string') { throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``); } - return string.replace(ansiRegex(), ''); + // Even though the regex is global, we don't need to reset the `.lastIndex` + // because unlike `.exec()` and `.test()`, `.replace()` does it automatically + // and doing it manually has a performance penalty. + return string.replace(regex, ''); } diff --git a/node_modules/wrap-ansi/node_modules/strip-ansi/package.json b/node_modules/wrap-ansi/node_modules/strip-ansi/package.json index 0de0586f7eef1..e1f455c325b00 100644 --- a/node_modules/wrap-ansi/node_modules/strip-ansi/package.json +++ b/node_modules/wrap-ansi/node_modules/strip-ansi/package.json @@ -1,6 +1,6 @@ { "name": "strip-ansi", - "version": "7.0.1", + "version": "7.1.0", "description": "Strip ANSI escape codes from a string", "license": "MIT", "repository": "chalk/strip-ansi", diff --git a/package-lock.json b/package-lock.json index dee41b223fd55..e1a2a119dbe83 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1329,9 +1329,9 @@ } }, "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "inBundle": true, "dependencies": { "ansi-regex": "^6.0.1" @@ -15231,9 +15231,9 @@ } }, "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", - "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "inBundle": true, "dependencies": { "ansi-regex": "^6.0.1" From 49417366beca68ee0403150917b8cc36ab3550c4 Mon Sep 17 00:00:00 
2001 From: Gar Date: Mon, 19 Jun 2023 07:57:14 -0700 Subject: [PATCH 15/16] deps: tuf-js@1.1.7 --- node_modules/tuf-js/dist/updater.js | 4 +++- node_modules/tuf-js/package.json | 10 +++++----- package-lock.json | 8 ++++---- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/node_modules/tuf-js/dist/updater.js b/node_modules/tuf-js/dist/updater.js index 71fa4981e3122..2aba48d24affd 100644 --- a/node_modules/tuf-js/dist/updater.js +++ b/node_modules/tuf-js/dist/updater.js @@ -54,6 +54,8 @@ class Updater { retries: this.config.fetchRetries, }); } + // refresh and load the metadata before downloading the target + // refresh should be called once after the client is initialized async refresh() { await this.loadRoot(); await this.loadTimestamp(); @@ -102,7 +104,7 @@ class Updater { } try { if (fs.existsSync(filePath)) { - targetInfo.verify(fs.createReadStream(filePath)); + await targetInfo.verify(fs.createReadStream(filePath)); return filePath; } } diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json index c1134af2b2ff3..9187d88083272 100644 --- a/node_modules/tuf-js/package.json +++ b/node_modules/tuf-js/package.json @@ -1,6 +1,6 @@ { "name": "tuf-js", - "version": "1.1.6", + "version": "1.1.7", "description": "JavaScript implementation of The Update Framework (TUF)", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -29,16 +29,16 @@ "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", "devDependencies": { "@tufjs/repo-mock": "1.3.1", - "@types/debug": "^4.1.7", + "@types/debug": "^4.1.8", "@types/make-fetch-happen": "^10.0.1", - "@types/node": "^20.1.1", + "@types/node": "^20.2.5", "nock": "^13.3.1", - "typescript": "^5.0.4" + "typescript": "^5.1.3" }, "dependencies": { "@tufjs/models": "1.0.4", "debug": "^4.3.4", - "make-fetch-happen": "^11.1.0" + "make-fetch-happen": "^11.1.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/package-lock.json b/package-lock.json index e1a2a119dbe83..360248d47040a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14541,14 +14541,14 @@ } }, "node_modules/tuf-js": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.6.tgz", - "integrity": "sha512-CXwFVIsXGbVY4vFiWF7TJKWmlKJAT8TWkH4RmiohJRcDJInix++F0dznDmoVbtJNzZ8yLprKUG4YrDIhv3nBMg==", + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz", + "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==", "inBundle": true, "dependencies": { "@tufjs/models": "1.0.4", "debug": "^4.3.4", - "make-fetch-happen": "^11.1.0" + "make-fetch-happen": "^11.1.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" From 0feddbe5162e5784f8f712a8cf6e6d0db0a368ae Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 19 Jun 2023 08:50:28 -0700 Subject: [PATCH 16/16] chore: remove unused minify-registry-metadata --- package-lock.json | 1 - workspaces/libnpmexec/package.json | 1 - 2 files changed, 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index 360248d47040a..297ce2e9d9f3b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15650,7 +15650,6 @@ "chalk": "^5.2.0", "just-extend": "^6.2.0", "just-safe-set": "^4.2.1", - "minify-registry-metadata": "^3.0.0", "tap": "^16.3.4" }, "engines": { diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index 1fa85ff033b8c..b3f09343a6fd2 100644 --- a/workspaces/libnpmexec/package.json +++ 
b/workspaces/libnpmexec/package.json @@ -56,7 +56,6 @@ "chalk": "^5.2.0", "just-extend": "^6.2.0", "just-safe-set": "^4.2.1", - "minify-registry-metadata": "^3.0.0", "tap": "^16.3.4" }, "dependencies": {
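// Regarding the strip-ansi 7.1.0 change above: hoisting the regex to module
// scope is safe because String.prototype.replace does not rely on a global
// regex's lastIndex, while .test()/.exec() do advance it. The /x/g pattern
// below is a trivial stand-in for the real ansi-regex.
const re = /x/g;
'axa'.replace(re, '');
console.log(re.lastIndex); // 0 -- replace leaves lastIndex reset
re.test('axa');
console.log(re.lastIndex); // 2 -- test advances lastIndex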