From f4eee3323fd04f366d670ad2ee9078a1e9fe3c4b Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 14 Mar 2022 13:46:54 -0700 Subject: [PATCH 01/11] deps: @npmcli/metavuln-calculator@3.0.1 --- .../node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 ++ .../node_modules/cacache/lib/content/read.js | 259 +++++++++++ .../node_modules/cacache/lib/content/rm.js | 20 + .../node_modules/cacache/lib/content/write.js | 194 +++++++++ .../node_modules/cacache/lib/entry-index.js | 412 ++++++++++++++++++ .../node_modules/cacache/lib/get.js | 251 +++++++++++ .../node_modules/cacache/lib/index.js | 45 ++ .../node_modules/cacache/lib/memoization.js | 74 ++++ .../node_modules/cacache/lib/put.js | 87 ++++ .../node_modules/cacache/lib/rm.js | 31 ++ .../node_modules/cacache/lib/util/disposer.js | 31 ++ .../cacache/lib/util/fix-owner.js | 148 +++++++ .../cacache/lib/util/hash-to-segments.js | 7 + .../cacache/lib/util/move-file.js | 69 +++ .../node_modules/cacache/lib/util/tmp.js | 35 ++ .../node_modules/cacache/lib/verify.js | 291 +++++++++++++ .../node_modules/cacache/package.json | 88 ++++ .../@npmcli/metavuln-calculator/package.json | 10 +- package-lock.json | 79 +++- workspaces/arborist/package.json | 2 +- 21 files changed, 2160 insertions(+), 18 deletions(-) create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..8bffb2af83cab --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js @@ -0,0 +1,259 @@ +'use strict' + +const util = require('util') + +const fs = require('fs') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +const lstat = util.promisify(fs.lstat) +const readFile = util.promisify(fs.readFile) + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +function read (cache, integrity, opts = {}) { + const { size } = opts + return withContentSri(cache, integrity, (cpath, sri) => { + // get size + return lstat(cpath).then(stat => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + return readFile(cpath, null).then((data) => { + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data + }) + }) +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.sync = readSync + +function readSync (cache, integrity, opts = {}) { + const { size } = opts + return 
withContentSriSync(cache, integrity, (cpath, sri) => { + const data = fs.readFileSync(cpath) + if (typeof size === 'number' && size !== data.length) { + throw sizeError(size, data.length) + } + + if (ssri.checkData(data, sri)) { + return data + } + + throw integrityError(sri, cpath) + }) +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + withContentSri(cache, integrity, (cpath, sri) => { + // just lstat to ensure it exists + return lstat(cpath).then((stat) => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + readPipeline(cpath, stat.size, sri, stream) + }, er => stream.emit('error', er)) + + return stream +} + +let copyFile +if (fs.copyFile) { + module.exports.copy = copy + module.exports.copy.sync = copySync + copyFile = util.promisify(fs.copyFile) +} + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return copyFile(cpath, dest) + }) +} + +function copySync (cache, integrity, dest) { + return withContentSriSync(cache, integrity, (cpath, sri) => { + return fs.copyFileSync(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +function hasContent (cache, integrity) { + if (!integrity) { + return Promise.resolve(false) + } + + return withContentSri(cache, integrity, (cpath, sri) => { + return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) + }).catch((err) => { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + }) +} + +module.exports.hasContent.sync = hasContentSync + +function hasContentSync (cache, integrity) { + if (!integrity) { + return false + } + + return withContentSriSync(cache, integrity, (cpath, sri) => { + try { + const stat = fs.lstatSync(cpath) + return { size: stat.size, sri, stat } + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } + }) +} + +function withContentSri (cache, integrity, fn) { + const tryFn = () => { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
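+    // Illustrative example (hypothetical digests): parsing
+    //   'sha512-AAA... sha512-BBB...'
+    // yields two sha512 entries; pickAlgorithm() picks the strongest
+    // algorithm present, and the branch below tries each of its digests
+    // until one maps to content that actually exists on disk.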
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + return Promise + .all(digests.map((meta) => { + return withContentSri(cache, meta, fn) + .catch((err) => { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + }) + })) + .then((results) => { + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + }) + } + } + + return new Promise((resolve, reject) => { + try { + tryFn() + .then(resolve) + .catch(reject) + } catch (err) { + reject(err) + } + }) +} + +function withContentSriSync (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + let lastErr = null + for (const meta of digests) { + try { + return withContentSriSync(cache, meta, fn) + } catch (err) { + lastErr = err + } + } + throw lastErr + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..50612364e9b48 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,20 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const { hasContent } = require('./read') +const rimraf = util.promisify(require('rimraf')) + +module.exports = rm + +function rm (cache, integrity) { + return hasContent(cache, integrity).then((content) => { + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + return rimraf(contentPath(cache, content.sri)).then(() => true) + } else { + return false + } + }) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..a71e81ad5e150 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js @@ -0,0 +1,194 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const fixOwner = require('../util/fix-owner') +const fs = require('fs') +const moveFile = 
require('../util/move-file') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const { disposer } = require('./../util/disposer') +const fsm = require('fs-minipass') + +const writeFile = util.promisify(fs.writeFile) + +module.exports = write + +function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + if (algorithms && algorithms.length > 1) { + throw new Error('opts.algorithms only supports a single algorithm for now') + } + + if (typeof size === 'number' && data.length !== size) { + return Promise.reject(sizeError(size, data.length)) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + return Promise.reject(checksumError(integrity, sri)) + } + + return disposer(makeTmp(cache, opts), makeTmpDisposer, + (tmp) => { + return writeFile(tmp.target, data, { flag: 'wx' }) + .then(() => moveToDestination(tmp, cache, sri, opts)) + }) + .then(() => ({ integrity: sri, size: data.length })) +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +function handleContent (inputStream, cache, opts) { + return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { + return pipeToTmp(inputStream, cache, tmp.target, opts) + .then((res) => { + return moveToDestination( + tmp, + cache, + res.integrity, + opts + ).then(() => res) + }) + }) +} + +function pipeToTmp (inputStream, cache, tmpTarget, opts) { + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + // NB: this can throw if the hashStream has a problem with + // it, and the data is fully written. but pipeToTmp is only + // called in promisory contexts where that is handled. 
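+  // Data flows inputStream -> hashStream (computes/checks the ssri
+  // integrity) -> outStream (the tmp file). If any stage fails,
+  // pipeline.promise() rejects and the catch below cleans up the
+  // partially written tmp file before re-throwing.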
+ const pipeline = new Pipeline( + inputStream, + hashStream, + outStream + ) + + return pipeline.promise() + .then(() => ({ integrity, size })) + .catch(er => rimraf(tmpTarget).then(() => { + throw er + })) +} + +function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ + target: tmpTarget, + moved: false, + })) +} + +function makeTmpDisposer (tmp) { + if (tmp.moved) { + return Promise.resolve() + } + + return rimraf(tmp.target) +} + +function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + + return fixOwner + .mkdirfix(cache, destDir) + .then(() => { + return moveFile(tmp.target, destination) + }) + .then(() => { + tmp.moved = true + return fixOwner.chownr(cache, destination) + }) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..426778b850963 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,412 @@ +'use strict' + +const util = require('util') +const crypto = require('crypto') +const fs = require('fs') +const Minipass = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const { disposer } = require('./util/disposer') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const moveFile = require('@npmcli/move-file') +const _rimraf = require('rimraf') +const rimraf = util.promisify(_rimraf) +rimraf.sync = _rimraf.sync + +const appendFile = util.promisify(fs.appendFile) +const readFile = util.promisify(fs.readFile) +const readdir = util.promisify(fs.readdir) +const writeFile = util.promisify(fs.writeFile) + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. 
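+    // e.g. a deletion appended by delete() looks roughly like
+    //   { key: 'my-key', integrity: null, time: 1647292014000 }
+    // (illustrative values)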
+ // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fixOwner.mkdirfix(cache, path.dirname(target)) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rimraf(tmp.target) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + try { + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + } + + // write the file atomically + await disposer(setup(), teardown, write) + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +function insert (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + return fixOwner + .mkdirfix(cache, path.dirname(bucket)) + .then(() => { + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + }) + .then(() => fixOwner.chownr(cache, bucket)) + .catch((err) => { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + // There's a class of race conditions that happen when things get deleted + // during fixOwner, or between the two mkdirfix/chownr calls. + // + // It's perfectly fine to just not bother in those cases and lie + // that the index entry was written. Because it's a cache. 
+ }) + .then(() => { + return formatEntry(cache, entry) + }) +} + +module.exports.insert.sync = insertSync + +function insertSync (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + try { + fixOwner.chownr.sync(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +function find (cache, key) { + const bucket = bucketPath(cache, key) + return bucketEntries(bucket) + .then((entries) => { + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + }) +} + +module.exports.find.sync = findSync + +function findSync (cache, key) { + const bucket = bucketPath(cache, key) + try { + return bucketEntriesSync(bucket).reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf(bucket) +} + +module.exports.delete.sync = delSync + +function delSync (cache, key, opts = {}) { + if (!opts.removeFully) { + return insertSync(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf.sync(bucket) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + readdirOrEmpty(indexDir).then(buckets => Promise.all( + buckets.map(bucket => { + const bucketPath = path.join(indexDir, bucket) + return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( + subbuckets.map(subbucket => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + return readdirOrEmpty(subbucketPath).then(entries => Promise.all( + entries.map(entry => { + const entryPath = path.join(subbucketPath, entry) + return bucketEntries(entryPath).then(entries => + // using a Map here prevents duplicate keys from + // showing up twice, I guess? 
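+                  // e.g. if a bucket holds two appends for the same key,
+                  // the later (newer) entry overwrites the earlier one in
+                  // the Map, so each key is emitted at most once below.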
+ entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + ).then(reduced => { + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + }).catch(err => { + if (err.code === 'ENOENT') { + return undefined + } + throw err + }) + }) + )) + }) + )) + }) + )) + .then( + () => stream.end(), + err => stream.emit('error', err) + ) + + return stream +} + +module.exports.ls = ls + +function ls (cache) { + return lsStream(cache).collect().then(entries => + entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) + ) +} + +module.exports.bucketEntries = bucketEntries + +function bucketEntries (bucket, filter) { + return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) +} + +module.exports.bucketEntries.sync = bucketEntriesSync + +function bucketEntriesSync (bucket, filter) { + const data = fs.readFileSync(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (e) { + // Entry is corrupted! + return + } + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..d9d4bf4c6416f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js @@ -0,0 +1,251 @@ +'use strict' + +const Collect = require('minipass-collect') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const fs = require('fs') +const util = require('util') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +const writeFile = util.promisify(fs.writeFile) + +function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve({ + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + }) + } + + return index.find(cache, key, opts).then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + return read(cache, entry.integrity, { integrity, size }).then((data) => { + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) +} +module.exports = getData + +function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized) + } + + return read(cache, key, { integrity, size }).then((res) => { + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res + }) +} +module.exports.byDigest = getDataByDigest + +function getDataSync (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + const entry = index.find.sync(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = read.sync(cache, entry.integrity, { + integrity: integrity, + size: size, + }) + const res = { + metadata: entry.metadata, + data: data, + size: entry.size, + integrity: entry.integrity, + } + if (memoize) { + memo.put(cache, entry, res.data, opts) + } + + return res +} + +module.exports.sync = getDataSync + +function getDataByDigestSync (cache, digest, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, digest, opts) + + if (memoized && memoize !== false) { + return memoized + } + + const res = read.sync(cache, digest, { + integrity: integrity, + size: size, + }) + if (memoize) { + memo.put.byDigest(cache, digest, res, opts) + } + + return res +} +module.exports.sync.byDigest = getDataByDigestSync + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 
'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + index + .find(cache, key) + .then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + }) + .catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +function copy (cache, key, dest, opts = {}) { + if (read.copy) { + return index.find(cache, key, opts).then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + return read.copy(cache, entry.integrity, dest, opts) + .then(() => { + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) + } + + return getData(cache, key, opts).then((res) => { + return writeFile(dest, res.data).then(() => { + return { + metadata: res.metadata, + size: res.size, + integrity: res.integrity, + } + }) + }) +} +module.exports.copy = copy + +function copyByDigest (cache, key, dest, opts = {}) { + if (read.copy) { + return read.copy(cache, key, dest, opts).then(() => key) + } + + return getDataByDigest(cache, key, opts).then((res) => { + return writeFile(dest, res).then(() => key) + }) +} +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..1c56be68dd8fd --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js @@ -0,0 +1,45 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = 
require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.sync = get.sync +module.exports.get.sync.byDigest = get.sync.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent +module.exports.get.hasContent.sync = get.hasContent.sync + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..e1b13dd5fd528 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js @@ -0,0 +1,74 @@ +'use strict' + +const LRU = require('lru-cache') + +const MAX_SIZE = 50 * 1024 * 1024 // 50MB +const MAX_AGE = 3 * 60 * 1000 + +const MEMOIZED = new LRU({ + max: MAX_SIZE, + maxAge: MAX_AGE, + length: (entry, key) => key.startsWith('key:') ? 
entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.reset() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..d6904fa301272 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js @@ -0,0 +1,87 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + return write(cache, data, opts).then((res) => { + return index + .insert(cache, key, res.integrity, { ...opts, size: res.size }) + .then((entry) => { + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity + }) + }) +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. 
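+  // Typical (illustrative) usage of the pipeline this function returns:
+  //   someReadable.pipe(putStream(cache, 'my-key', opts))
+  //     .on('integrity', (i) => { /* sri of the stored content */ })
+  // the 'integrity'/'size' values captured from contentStream below are
+  // re-emitted on the pipeline once the index entry is written.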
+ const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + flush () { + return index + .insert(cache, key, integrity, { ...opts, size }) + .then((entry) => { + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + + if (integrity) { + pipeline.emit('integrity', integrity) + } + + if (size) { + pipeline.emit('size', size) + } + }) + }, + })) + + return pipeline +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..5f00071770b8d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const util = require('util') + +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +function all (cache) { + memo.clearMemoized() + return rimraf(path.join(cache, '*(content-*|index-*)')) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js new file mode 100644 index 0000000000000..52d7d3edda7d5 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js @@ -0,0 +1,31 @@ +'use strict' + +module.exports.disposer = disposer + +function disposer (creatorFn, disposerFn, fn) { + const runDisposer = (resource, result, shouldThrow = false) => { + return disposerFn(resource) + .then( + // disposer resolved, do something with original fn's promise + () => { + if (shouldThrow) { + throw result + } + + return result + }, + // Disposer fn failed, crash process + (err) => { + throw err + // Or process.exit? + }) + } + + return creatorFn + .then((resource) => { + // fn(resource) can throw, so wrap in a promise here + return Promise.resolve().then(() => fn(resource)) + .then((result) => runDisposer(resource, result)) + .catch((err) => runDisposer(resource, err, true)) + }) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js new file mode 100644 index 0000000000000..bc14def4e405c --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js @@ -0,0 +1,148 @@ +'use strict' + +const util = require('util') + +const chownr = util.promisify(require('chownr')) +const mkdirp = require('mkdirp') +const inflight = require('promise-inflight') +const inferOwner = require('infer-owner') + +// Memoize getuid()/getgid() calls. 
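+// e.g. after a call to process.setuid(1000), the cached uid is dropped
+// so the next getSelf() re-reads the real value via process.getuid().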
+// patch process.setuid/setgid to invalidate cached value on change +const self = { uid: null, gid: null } +const getSelf = () => { + if (typeof self.uid !== 'number') { + self.uid = process.getuid() + const setuid = process.setuid + process.setuid = (uid) => { + self.uid = null + process.setuid = setuid + return process.setuid(uid) + } + } + if (typeof self.gid !== 'number') { + self.gid = process.getgid() + const setgid = process.setgid + process.setgid = (gid) => { + self.gid = null + process.setgid = setgid + return process.setgid(gid) + } + } +} + +module.exports.chownr = fixOwner + +function fixOwner (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return Promise.resolve() + } + + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return Promise.resolve() + } + + return Promise.resolve(inferOwner(cache)).then((owner) => { + const { uid, gid } = owner + + // No need to override if it's already what we used. + if (self.uid === uid && self.gid === gid) { + return + } + + return inflight('fixOwner: fixing ownership on ' + filepath, () => + chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ).catch((err) => { + if (err.code === 'ENOENT') { + return null + } + + throw err + }) + ) + }) +} + +module.exports.chownr.sync = fixOwnerSync + +function fixOwnerSync (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return + } + const { uid, gid } = inferOwner.sync(cache) + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return + } + + if (self.uid === uid && self.gid === gid) { + // No need to override if it's already what we used. + return + } + try { + chownr.sync( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ) + } catch (err) { + // only catch ENOENT, any other error is a problem. + if (err.code === 'ENOENT') { + return null + } + + throw err + } +} + +module.exports.mkdirfix = mkdirfix + +function mkdirfix (cache, p, cb) { + // we have to infer the owner _before_ making the directory, even though + // we aren't going to use the results, since the cache itself might not + // exist yet. If we mkdirp it, then our current uid/gid will be assumed + // to be correct if it creates the cache folder in the process. 
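+  // e.g. when running as root over a cache owned by uid 1000, priming
+  // inferOwner here lets the later fixOwner() chown the directories we
+  // create back to uid 1000 rather than leaving them root-owned.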
+ return Promise.resolve(inferOwner(cache)).then(() => { + return mkdirp(p) + .then((made) => { + if (made) { + return fixOwner(cache, made).then(() => made) + } + }) + .catch((err) => { + if (err.code === 'EEXIST') { + return fixOwner(cache, p).then(() => null) + } + + throw err + }) + }) +} + +module.exports.mkdirfix.sync = mkdirfixSync + +function mkdirfixSync (cache, p) { + try { + inferOwner.sync(cache) + const made = mkdirp.sync(p) + if (made) { + fixOwnerSync(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + fixOwnerSync(cache, p) + return null + } else { + throw err + } + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js new file mode 100644 index 0000000000000..3739cea3df281 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js @@ -0,0 +1,69 @@ +'use strict' + +const fs = require('fs') +const util = require('util') +const chmod = util.promisify(fs.chmod) +const unlink = util.promisify(fs.unlink) +const stat = util.promisify(fs.stat) +const move = require('@npmcli/move-file') +const pinflight = require('promise-inflight') + +module.exports = moveFile + +function moveFile (src, dest) { + const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || + process.platform === 'win32' + + // This isn't quite an fs.rename -- the assumption is that + // if `dest` already exists, and we get certain errors while + // trying to move it, we should just not bother. + // + // In the case of cache corruption, users will receive an + // EINTEGRITY error elsewhere, and can remove the offending + // content their own way. + // + // Note that, as the name suggests, this strictly only supports file moves. + return new Promise((resolve, reject) => { + fs.link(src, dest, (err) => { + if (err) { + if (isWindows && err.code === 'EPERM') { + // XXX This is a really weird way to handle this situation, as it + // results in the src file being deleted even though the dest + // might not exist. Since we pretty much always write files to + // deterministic locations based on content hash, this is likely + // ok (or at worst, just ends in a future cache miss). But it would + // be worth investigating at some time in the future if this is + // really what we want to do here. + return resolve() + } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { + // file already exists, so whatever + return resolve() + } else { + return reject(err) + } + } else { + return resolve() + } + }) + }) + .then(() => { + // content should never change for any reason, so make it read-only + return Promise.all([ + unlink(src), + !isWindows && chmod(dest, '0444'), + ]) + }) + .catch(() => { + return pinflight('cacache-move-file:' + dest, () => { + return stat(dest).catch((err) => { + if (err.code !== 'ENOENT') { + // Something else is wrong here. 
Bail bail bail + throw err + } + // file doesn't already exist! let's try a rename -> copy fallback + // only delete if it successfully copies + return move(src, dest) + }) + }) + }) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0a5a50eba3061 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,35 @@ +'use strict' + +const fs = require('@npmcli/fs') + +const fixOwner = require('./fix-owner') +const path = require('path') + +module.exports.mkdir = mktmpdir + +function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + .then(() => { + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) + }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) +} + +module.exports.fix = fixtmpdir + +function fixtmpdir (cache) { + return fixOwner(cache, path.join(cache, 'tmp')) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..300cd9f9de1c4 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js @@ -0,0 +1,291 @@ +'use strict' + +const util = require('util') + +const pMap = require('p-map') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const fs = require('fs') +const fsm = require('fs-minipass') +const glob = util.promisify(require('glob')) +const index = require('./entry-index') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const stat = util.promisify(fs.stat) +const truncate = util.promisify(fs.truncate) +const writeFile = util.promisify(fs.writeFile) +const readFile = util.promisify(fs.readFile) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + return steps + .reduce((promise, step, i) => { + const label = step.name + const start = new Date() + return promise.then((stats) => { + return step(cache, opts).then((s) => { + s && + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + + stats.runTime[label] = end - start + return Promise.resolve(stats) + }) + }) + }, Promise.resolve({})) + .then((stats) => { + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats + }) +} + +function markStartTime (cache, opts) { + return Promise.resolve({ startTime: new Date() }) +} + +function markEndTime (cache, opts) { + return 
Promise.resolve({ endTime: new Date() }) +} + +function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + return fixOwner + .mkdirfix(cache, cache) + .then(() => { + // TODO - fix file permissions too + return fixOwner.chownr(cache, cache) + }) + .then(() => null) +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rimraf it. +// +function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + liveContent.add(entry.integrity.toString()) + }) + return new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }).then(() => { + const contentDir = contentPath.contentDir(cache) + return glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }).then((files) => { + return Promise.resolve({ + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + }).then((stats) => + pMap( + files, + (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + return verifyContent(f, integrity).then((info) => { + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + return stats + }) + } else { + // No entries refer to this content. We can delete. 
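+              // e.g. orphaned content left behind after its index entry
+              // was compacted away, or content excluded by opts.filter.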
+ stats.reclaimedCount++ + return stat(f).then((s) => { + return rimraf(f).then(() => { + stats.reclaimedSize += s.size + return stats + }) + }) + } + }, + { concurrency: opts.concurrency } + ).then(() => stats) + ) + }) + }) +} + +function verifyContent (filepath, sri) { + return stat(filepath) + .then((s) => { + const contentInfo = { + size: s.size, + valid: true, + } + return ssri + .checkStream(new fsm.ReadStream(filepath), sri) + .catch((err) => { + if (err.code !== 'EINTEGRITY') { + throw err + } + + return rimraf(filepath).then(() => { + contentInfo.valid = false + }) + }) + .then(() => contentInfo) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + + throw err + }) +} + +function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + return index.ls(cache).then((entries) => { + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + return pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ).then(() => stats) + }) +} + +function rebuildBucket (cache, bucket, stats, opts) { + return truncate(bucket._path).then(() => { + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
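+    // the reduce() below chains one insert() per entry, so writes to
+    // this bucket file happen strictly one after another rather than
+    // racing each other.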
+ return bucket.reduce((promise, entry) => { + return promise.then(() => { + const content = contentPath(cache, entry.integrity) + return stat(content) + .then(() => { + return index + .insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + }) + .then(() => { + stats.totalEntries++ + }) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + return + } + throw err + }) + }) + }, Promise.resolve()) + }) +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rimraf(path.join(cache, 'tmp')) +} + +function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + try { + return writeFile(verifile, '' + +new Date()) + } finally { + fixOwner.chownr.sync(cache, verifile) + } +} + +module.exports.lastRun = lastRun + +function lastRun (cache) { + return readFile(path.join(cache, '_lastverified'), 'utf8').then( + (data) => new Date(+data) + ) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json new file mode 100644 index 0000000000000..b9efa92d9f3e0 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json @@ -0,0 +1,88 @@ +{ + "name": "cacache", + "version": "16.0.0", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin", + "lib" + ], + "scripts": { + "benchmarks": "node test/benchmarks", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint '**/*.js'", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "posttest": "npm run lint" + }, + "repository": "https://github.com/npm/cacache", + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + }, + "devDependencies": { + "@npmcli/template-oss": "^2.9.2", + "benchmark": "^2.1.4", + "chalk": "^4.0.0", + "require-inject": "^1.4.4", + "tacks": "^1.3.0", + "tap": "^15.0.9" + }, + "tap": { + "100": true, + "test-regex": "test/[^/]*.js" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "windowsCI": false, + "version": "2.9.2" + }, + "author": "GitHub Inc." 
+} diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json index 385a34b85af27..6c05ee529cc82 100644 --- a/node_modules/@npmcli/metavuln-calculator/package.json +++ b/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "3.0.0", + "version": "3.0.1", "main": "lib/index.js", "files": [ "bin", @@ -29,20 +29,20 @@ "coverage-map": "map.js" }, "devDependencies": { - "@npmcli/template-oss": "^2.7.1", + "@npmcli/template-oss": "^2.9.2", "require-inject": "^1.4.4", "tap": "^15.1.6" }, "dependencies": { - "cacache": "^15.3.0", + "cacache": "^16.0.0", "json-parse-even-better-errors": "^2.3.1", - "pacote": "^13.0.1", + "pacote": "^13.0.3", "semver": "^7.3.5" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" }, "templateOSS": { - "version": "2.7.1" + "version": "2.9.2" } } diff --git a/package-lock.json b/package-lock.json index 00f6b4d6c72a9..db7bad188dc91 100644 --- a/package-lock.json +++ b/package-lock.json @@ -959,19 +959,47 @@ } }, "node_modules/@npmcli/metavuln-calculator": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-3.0.0.tgz", - "integrity": "sha512-tIzAdW3DAvlyuQyYvy7WuDKaJs55LoXFAIyglZTrHsc9DGZWP1YVL7+8WFKqx+lHyHUEkfk02Dc8ie4JWtNO6w==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-3.0.1.tgz", + "integrity": "sha512-XnaD5UfV/qQoIEPWnfBntw8Ik5HWkxEc1wCmfHxhogdj06bwP51nAyU3QLBdhnFsmQQElqV0S8eHXn2zEXnSZw==", "dependencies": { - "cacache": "^15.3.0", + "cacache": "^16.0.0", "json-parse-even-better-errors": "^2.3.1", - "pacote": "^13.0.1", + "pacote": "^13.0.3", "semver": "^7.3.5" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, "node_modules/@npmcli/move-file": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", @@ -10387,7 +10415,7 @@ "@isaacs/string-locale-compare": "^1.1.0", "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/map-workspaces": "^2.0.0", - "@npmcli/metavuln-calculator": "^3.0.0", + "@npmcli/metavuln-calculator": "^3.0.1", "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.3", @@ -11332,7 +11360,7 @@ "@isaacs/string-locale-compare": "^1.1.0", "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/map-workspaces": "^2.0.0", - "@npmcli/metavuln-calculator": "^3.0.0", + "@npmcli/metavuln-calculator": "3.0.1", "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.3", @@ -11480,14 +11508,41 @@ } }, "@npmcli/metavuln-calculator": { - 
"version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-3.0.0.tgz", - "integrity": "sha512-tIzAdW3DAvlyuQyYvy7WuDKaJs55LoXFAIyglZTrHsc9DGZWP1YVL7+8WFKqx+lHyHUEkfk02Dc8ie4JWtNO6w==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-3.0.1.tgz", + "integrity": "sha512-XnaD5UfV/qQoIEPWnfBntw8Ik5HWkxEc1wCmfHxhogdj06bwP51nAyU3QLBdhnFsmQQElqV0S8eHXn2zEXnSZw==", "requires": { - "cacache": "^15.3.0", + "cacache": "^16.0.0", "json-parse-even-better-errors": "^2.3.1", - "pacote": "^13.0.1", + "pacote": "^13.0.3", "semver": "^7.3.5" + }, + "dependencies": { + "cacache": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "requires": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + } + } } }, "@npmcli/move-file": { diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 3a4c83f553f32..bdc92326e02d8 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -6,7 +6,7 @@ "@isaacs/string-locale-compare": "^1.1.0", "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/map-workspaces": "^2.0.0", - "@npmcli/metavuln-calculator": "^3.0.0", + "@npmcli/metavuln-calculator": "^3.0.1", "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.3", From 68c372cf2098717740594310762fe8b3d7028873 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 14 Mar 2022 13:50:12 -0700 Subject: [PATCH 02/11] deps: pacote@13.0.4 --- node_modules/npm-packlist/bin/index.js | 9 +- node_modules/npm-packlist/{ => lib}/index.js | 5 +- node_modules/npm-packlist/package.json | 31 +- .../pacote/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 ++ .../node_modules/cacache/lib/content/read.js | 259 +++++++++++ .../node_modules/cacache/lib/content/rm.js | 20 + .../node_modules/cacache/lib/content/write.js | 194 +++++++++ .../node_modules/cacache/lib/entry-index.js | 412 ++++++++++++++++++ .../pacote/node_modules/cacache/lib/get.js | 251 +++++++++++ .../pacote/node_modules/cacache/lib/index.js | 45 ++ .../node_modules/cacache/lib/memoization.js | 74 ++++ .../pacote/node_modules/cacache/lib/put.js | 87 ++++ .../pacote/node_modules/cacache/lib/rm.js | 31 ++ .../node_modules/cacache/lib/util/disposer.js | 31 ++ .../cacache/lib/util/fix-owner.js | 148 +++++++ .../cacache/lib/util/hash-to-segments.js | 7 + .../cacache/lib/util/move-file.js | 69 +++ .../node_modules/cacache/lib/util/tmp.js | 35 ++ .../pacote/node_modules/cacache/lib/verify.js | 291 +++++++++++++ .../pacote/node_modules/cacache/package.json | 88 ++++ node_modules/pacote/package.json | 16 +- package-lock.json | 126 ++++-- package.json | 2 +- workspaces/libnpmdiff/package.json | 2 +- workspaces/libnpmexec/package.json | 2 +- workspaces/libnpmpack/package.json | 2 +- 27 files changed, 2216 insertions(+), 66 deletions(-) rename node_modules/npm-packlist/{ => 
lib}/index.js (99%) create mode 100644 node_modules/pacote/node_modules/cacache/LICENSE.md create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/get.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/index.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/put.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/rm.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/disposer.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/move-file.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/verify.js create mode 100644 node_modules/pacote/node_modules/cacache/package.json diff --git a/node_modules/npm-packlist/bin/index.js b/node_modules/npm-packlist/bin/index.js index 40811db7d32e7..a42f5b74ce80e 100755 --- a/node_modules/npm-packlist/bin/index.js +++ b/node_modules/npm-packlist/bin/index.js @@ -6,18 +6,19 @@ process.argv.slice(2).forEach(arg => { if (arg === '-h' || arg === '--help') { console.log('usage: npm-packlist [-s --sort] [directory, directory, ...]') process.exit(0) - } else if (arg === '-s' || arg === '--sort') + } else if (arg === '-s' || arg === '--sort') { doSort = true - else + } else { dirs.push(arg) + } }) const sort = list => doSort ? 
list.sort((a, b) => a.localeCompare(b, 'en')) : list const packlist = require('../') -if (!dirs.length) +if (!dirs.length) { console.log(sort(packlist.sync({ path: process.cwd() })).join('\n')) -else { +} else { dirs.forEach(path => { console.log(`> ${path}`) console.log(sort(packlist.sync({ path })).join('\n')) diff --git a/node_modules/npm-packlist/index.js b/node_modules/npm-packlist/lib/index.js similarity index 99% rename from node_modules/npm-packlist/index.js rename to node_modules/npm-packlist/lib/index.js index 76018557cb9c8..1b67e4e71e04d 100644 --- a/node_modules/npm-packlist/index.js +++ b/node_modules/npm-packlist/lib/index.js @@ -58,6 +58,7 @@ const defaultRules = [ '*.orig', '/package-lock.json', '/yarn.lock', + '/pnpm-lock.yaml', '/archived-packages/**', ] @@ -248,7 +249,7 @@ const npmWalker = Class => class Walker extends Class { } } const processResults = results => { - for (const {negate, fileList} of results) { + for (const { negate, fileList } of results) { if (negate) { fileList.forEach(f => { f = f.replace(/\/+$/, '') @@ -276,7 +277,7 @@ const npmWalker = Class => class Walker extends Class { // maintain the index so that we process them in-order only once all // are completed, otherwise the parallelism messes things up, since a // glob like **/*.js will always be slower than a subsequent !foo.js - patterns.forEach(({pattern, negate}, i) => + patterns.forEach(({ pattern, negate }, i) => this.globFiles(pattern, (er, res) => then(pattern, negate, er, res, i))) } diff --git a/node_modules/npm-packlist/package.json b/node_modules/npm-packlist/package.json index 2fe493a203601..ab270f60713b6 100644 --- a/node_modules/npm-packlist/package.json +++ b/node_modules/npm-packlist/package.json @@ -1,40 +1,42 @@ { "name": "npm-packlist", - "version": "3.0.0", + "version": "4.0.0", "description": "Get a list of the files to add from a folder into an npm package", "directories": { "test": "test" }, - "main": "index.js", + "main": "lib", "dependencies": { - "glob": "^7.1.6", + "glob": "^7.2.0", "ignore-walk": "^4.0.1", - "npm-bundled": "^1.1.1", + "npm-bundled": "^1.1.2", "npm-normalize-package-bin": "^1.0.1" }, - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "author": "GitHub Inc.", "license": "ISC", "files": [ - "bin/index.js", - "index.js" + "bin", + "lib" ], "devDependencies": { - "@npmcli/lint": "^1.0.2", + "@npmcli/template-oss": "^2.9.2", "mutate-fs": "^2.1.1", - "tap": "^15.0.6" + "tap": "^15.1.6" }, "scripts": { "test": "tap", - "posttest": "npm run lint --", + "posttest": "npm run lint", "snap": "tap", "postsnap": "npm run lintfix --", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "eslint": "eslint", - "lint": "npm run npmclilint -- \"*.*js\" \"test/**/*.*js\"", + "lint": "eslint '**/*.js'", "lintfix": "npm run lint -- --fix", - "npmclilint": "npmcli-lint" + "npmclilint": "npmcli-lint", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force" }, "repository": { "type": "git", @@ -54,6 +56,9 @@ "npm-packlist": "bin/index.js" }, "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "version": "2.9.2" } } diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/pacote/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..8bffb2af83cab --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/content/read.js @@ -0,0 +1,259 @@ +'use strict' + +const util = require('util') + +const fs = require('fs') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +const lstat = util.promisify(fs.lstat) +const readFile = util.promisify(fs.readFile) + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +function read (cache, integrity, opts = {}) { + const { size } = opts + return withContentSri(cache, integrity, (cpath, sri) => { + // get size + return lstat(cpath).then(stat => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + return readFile(cpath, null).then((data) => { + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data + }) + }) +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.sync = readSync + +function readSync (cache, integrity, opts = {}) { + const { size } = opts + return withContentSriSync(cache, integrity, (cpath, sri) => { + const data = fs.readFileSync(cpath) + if (typeof size === 'number' && size !== 
data.length) { + throw sizeError(size, data.length) + } + + if (ssri.checkData(data, sri)) { + return data + } + + throw integrityError(sri, cpath) + }) +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + withContentSri(cache, integrity, (cpath, sri) => { + // just lstat to ensure it exists + return lstat(cpath).then((stat) => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + readPipeline(cpath, stat.size, sri, stream) + }, er => stream.emit('error', er)) + + return stream +} + +let copyFile +if (fs.copyFile) { + module.exports.copy = copy + module.exports.copy.sync = copySync + copyFile = util.promisify(fs.copyFile) +} + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return copyFile(cpath, dest) + }) +} + +function copySync (cache, integrity, dest) { + return withContentSriSync(cache, integrity, (cpath, sri) => { + return fs.copyFileSync(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +function hasContent (cache, integrity) { + if (!integrity) { + return Promise.resolve(false) + } + + return withContentSri(cache, integrity, (cpath, sri) => { + return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) + }).catch((err) => { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + }) +} + +module.exports.hasContent.sync = hasContentSync + +function hasContentSync (cache, integrity) { + if (!integrity) { + return false + } + + return withContentSriSync(cache, integrity, (cpath, sri) => { + try { + const stat = fs.lstatSync(cpath) + return { size: stat.size, sri, stat } + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } + }) +} + +function withContentSri (cache, integrity, fn) { + const tryFn = () => { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
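+    // e.g. 'sha512-aaa... sha512-bbb...' parses into two digests under
+    // sha512; all are tried and the first with local content wins.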
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + return Promise + .all(digests.map((meta) => { + return withContentSri(cache, meta, fn) + .catch((err) => { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + }) + })) + .then((results) => { + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + }) + } + } + + return new Promise((resolve, reject) => { + try { + tryFn() + .then(resolve) + .catch(reject) + } catch (err) { + reject(err) + } + }) +} + +function withContentSriSync (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + let lastErr = null + for (const meta of digests) { + try { + return withContentSriSync(cache, meta, fn) + } catch (err) { + lastErr = err + } + } + throw lastErr + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..50612364e9b48 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,20 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const { hasContent } = require('./read') +const rimraf = util.promisify(require('rimraf')) + +module.exports = rm + +function rm (cache, integrity) { + return hasContent(cache, integrity).then((content) => { + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + return rimraf(contentPath(cache, content.sri)).then(() => true) + } else { + return false + } + }) +} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..a71e81ad5e150 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/content/write.js @@ -0,0 +1,194 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const fixOwner = require('../util/fix-owner') +const fs = require('fs') +const moveFile = require('../util/move-file') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = 
require('minipass-flush') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const { disposer } = require('./../util/disposer') +const fsm = require('fs-minipass') + +const writeFile = util.promisify(fs.writeFile) + +module.exports = write + +function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + if (algorithms && algorithms.length > 1) { + throw new Error('opts.algorithms only supports a single algorithm for now') + } + + if (typeof size === 'number' && data.length !== size) { + return Promise.reject(sizeError(size, data.length)) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + return Promise.reject(checksumError(integrity, sri)) + } + + return disposer(makeTmp(cache, opts), makeTmpDisposer, + (tmp) => { + return writeFile(tmp.target, data, { flag: 'wx' }) + .then(() => moveToDestination(tmp, cache, sri, opts)) + }) + .then(() => ({ integrity: sri, size: data.length })) +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +function handleContent (inputStream, cache, opts) { + return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { + return pipeToTmp(inputStream, cache, tmp.target, opts) + .then((res) => { + return moveToDestination( + tmp, + cache, + res.integrity, + opts + ).then(() => res) + }) + }) +} + +function pipeToTmp (inputStream, cache, tmpTarget, opts) { + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + // NB: this can throw if the hashStream has a problem with + // it, and the data is fully written. but pipeToTmp is only + // called in promisory contexts where that is handled. 
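+  // Data flows inputStream -> hashStream (computes and verifies the
+  // integrity and size) -> outStream (the tmp file on disk).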
+ const pipeline = new Pipeline( + inputStream, + hashStream, + outStream + ) + + return pipeline.promise() + .then(() => ({ integrity, size })) + .catch(er => rimraf(tmpTarget).then(() => { + throw er + })) +} + +function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ + target: tmpTarget, + moved: false, + })) +} + +function makeTmpDisposer (tmp) { + if (tmp.moved) { + return Promise.resolve() + } + + return rimraf(tmp.target) +} + +function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + + return fixOwner + .mkdirfix(cache, destDir) + .then(() => { + return moveFile(tmp.target, destination) + }) + .then(() => { + tmp.moved = true + return fixOwner.chownr(cache, destination) + }) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..426778b850963 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,412 @@ +'use strict' + +const util = require('util') +const crypto = require('crypto') +const fs = require('fs') +const Minipass = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const { disposer } = require('./util/disposer') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const moveFile = require('@npmcli/move-file') +const _rimraf = require('rimraf') +const rimraf = util.promisify(_rimraf) +rimraf.sync = _rimraf.sync + +const appendFile = util.promisify(fs.appendFile) +const readFile = util.promisify(fs.readFile) +const readdir = util.promisify(fs.readdir) +const writeFile = util.promisify(fs.writeFile) + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. 
+ // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fixOwner.mkdirfix(cache, path.dirname(target)) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rimraf(tmp.target) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + try { + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + } + + // write the file atomically + await disposer(setup(), teardown, write) + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +function insert (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + return fixOwner + .mkdirfix(cache, path.dirname(bucket)) + .then(() => { + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + }) + .then(() => fixOwner.chownr(cache, bucket)) + .catch((err) => { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + // There's a class of race conditions that happen when things get deleted + // during fixOwner, or between the two mkdirfix/chownr calls. + // + // It's perfectly fine to just not bother in those cases and lie + // that the index entry was written. Because it's a cache. 
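+      // At worst the lost write surfaces later as a cache miss,
+      // which callers have to handle anyway.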
+ }) + .then(() => { + return formatEntry(cache, entry) + }) +} + +module.exports.insert.sync = insertSync + +function insertSync (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + try { + fixOwner.chownr.sync(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +function find (cache, key) { + const bucket = bucketPath(cache, key) + return bucketEntries(bucket) + .then((entries) => { + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + }) +} + +module.exports.find.sync = findSync + +function findSync (cache, key) { + const bucket = bucketPath(cache, key) + try { + return bucketEntriesSync(bucket).reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf(bucket) +} + +module.exports.delete.sync = delSync + +function delSync (cache, key, opts = {}) { + if (!opts.removeFully) { + return insertSync(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf.sync(bucket) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + readdirOrEmpty(indexDir).then(buckets => Promise.all( + buckets.map(bucket => { + const bucketPath = path.join(indexDir, bucket) + return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( + subbuckets.map(subbucket => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + return readdirOrEmpty(subbucketPath).then(entries => Promise.all( + entries.map(entry => { + const entryPath = path.join(subbucketPath, entry) + return bucketEntries(entryPath).then(entries => + // using a Map here prevents duplicate keys from + // showing up twice, I guess? 
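+                  // (later entries overwrite earlier ones, so each
+                  // key ends up mapped to its newest entry)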
+ entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + ).then(reduced => { + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + }).catch(err => { + if (err.code === 'ENOENT') { + return undefined + } + throw err + }) + }) + )) + }) + )) + }) + )) + .then( + () => stream.end(), + err => stream.emit('error', err) + ) + + return stream +} + +module.exports.ls = ls + +function ls (cache) { + return lsStream(cache).collect().then(entries => + entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) + ) +} + +module.exports.bucketEntries = bucketEntries + +function bucketEntries (bucket, filter) { + return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) +} + +module.exports.bucketEntries.sync = bucketEntriesSync + +function bucketEntriesSync (bucket, filter) { + const data = fs.readFileSync(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (e) { + // Entry is corrupted! + return + } + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..d9d4bf4c6416f --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/get.js @@ -0,0 +1,251 @@ +'use strict' + +const Collect = require('minipass-collect') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const fs = require('fs') +const util = require('util') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +const writeFile = util.promisify(fs.writeFile) + +function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve({ + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + }) + } + + return index.find(cache, key, opts).then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + return read(cache, entry.integrity, { integrity, size }).then((data) => { + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) +} +module.exports = getData + +function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized) + } + + return read(cache, key, { integrity, size }).then((res) => { + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res + }) +} +module.exports.byDigest = getDataByDigest + +function getDataSync (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + const entry = index.find.sync(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = read.sync(cache, entry.integrity, { + integrity: integrity, + size: size, + }) + const res = { + metadata: entry.metadata, + data: data, + size: entry.size, + integrity: entry.integrity, + } + if (memoize) { + memo.put(cache, entry, res.data, opts) + } + + return res +} + +module.exports.sync = getDataSync + +function getDataByDigestSync (cache, digest, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, digest, opts) + + if (memoized && memoize !== false) { + return memoized + } + + const res = read.sync(cache, digest, { + integrity: integrity, + size: size, + }) + if (memoize) { + memo.put.byDigest(cache, digest, res, opts) + } + + return res +} +module.exports.sync.byDigest = getDataByDigestSync + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && 
cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + index + .find(cache, key) + .then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + }) + .catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +function copy (cache, key, dest, opts = {}) { + if (read.copy) { + return index.find(cache, key, opts).then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + return read.copy(cache, entry.integrity, dest, opts) + .then(() => { + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) + } + + return getData(cache, key, opts).then((res) => { + return writeFile(dest, res.data).then(() => { + return { + metadata: res.metadata, + size: res.size, + integrity: res.integrity, + } + }) + }) +} +module.exports.copy = copy + +function copyByDigest (cache, key, dest, opts = {}) { + if (read.copy) { + return read.copy(cache, key, dest, opts).then(() => key) + } + + return getDataByDigest(cache, key, opts).then((res) => { + return writeFile(dest, res).then(() => key) + }) +} +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..1c56be68dd8fd --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/index.js @@ -0,0 +1,45 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + 
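+// Assemble the public API surface from the sub-modules:
+// get/put/rm plus the index, tmp, and verify helpers.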
+module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.sync = get.sync +module.exports.get.sync.byDigest = get.sync.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent +module.exports.get.hasContent.sync = get.hasContent.sync + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..e1b13dd5fd528 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/memoization.js @@ -0,0 +1,74 @@ +'use strict' + +const LRU = require('lru-cache') + +const MAX_SIZE = 50 * 1024 * 1024 // 50MB +const MAX_AGE = 3 * 60 * 1000 + +const MEMOIZED = new LRU({ + max: MAX_SIZE, + maxAge: MAX_AGE, + length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.reset() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..d6904fa301272 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/put.js @@ -0,0 +1,87 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) 
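+
+// sha512 is the default content-address algorithm; opts may override
+// it, but content/write accepts only a single algorithm per call.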
+ +module.exports = putData + +function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + return write(cache, data, opts).then((res) => { + return index + .insert(cache, key, res.integrity, { ...opts, size: res.size }) + .then((entry) => { + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity + }) + }) +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + flush () { + return index + .insert(cache, key, integrity, { ...opts, size }) + .then((entry) => { + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + + if (integrity) { + pipeline.emit('integrity', integrity) + } + + if (size) { + pipeline.emit('size', size) + } + }) + }, + })) + + return pipeline +} diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..5f00071770b8d --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const util = require('util') + +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +function all (cache) { + memo.clearMemoized() + return rimraf(path.join(cache, '*(content-*|index-*)')) +} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/disposer.js b/node_modules/pacote/node_modules/cacache/lib/util/disposer.js new file mode 100644 index 0000000000000..52d7d3edda7d5 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/util/disposer.js @@ -0,0 +1,31 @@ +'use strict' + +module.exports.disposer = disposer + +function disposer (creatorFn, disposerFn, fn) { + const runDisposer = (resource, result, shouldThrow = false) => { + return disposerFn(resource) + .then( + // disposer resolved, do something with original fn's promise + () => { + if (shouldThrow) { + throw result + } + + return result + }, + // Disposer fn failed, crash process + (err) => { + throw err + // Or process.exit? 
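+        // Either way, a disposer failure rejects the whole chain,
+        // even when fn itself succeeded.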
+ }) + } + + return creatorFn + .then((resource) => { + // fn(resource) can throw, so wrap in a promise here + return Promise.resolve().then(() => fn(resource)) + .then((result) => runDisposer(resource, result)) + .catch((err) => runDisposer(resource, err, true)) + }) +} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js b/node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js new file mode 100644 index 0000000000000..bc14def4e405c --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js @@ -0,0 +1,148 @@ +'use strict' + +const util = require('util') + +const chownr = util.promisify(require('chownr')) +const mkdirp = require('mkdirp') +const inflight = require('promise-inflight') +const inferOwner = require('infer-owner') + +// Memoize getuid()/getgid() calls. +// patch process.setuid/setgid to invalidate cached value on change +const self = { uid: null, gid: null } +const getSelf = () => { + if (typeof self.uid !== 'number') { + self.uid = process.getuid() + const setuid = process.setuid + process.setuid = (uid) => { + self.uid = null + process.setuid = setuid + return process.setuid(uid) + } + } + if (typeof self.gid !== 'number') { + self.gid = process.getgid() + const setgid = process.setgid + process.setgid = (gid) => { + self.gid = null + process.setgid = setgid + return process.setgid(gid) + } + } +} + +module.exports.chownr = fixOwner + +function fixOwner (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return Promise.resolve() + } + + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return Promise.resolve() + } + + return Promise.resolve(inferOwner(cache)).then((owner) => { + const { uid, gid } = owner + + // No need to override if it's already what we used. + if (self.uid === uid && self.gid === gid) { + return + } + + return inflight('fixOwner: fixing ownership on ' + filepath, () => + chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ).catch((err) => { + if (err.code === 'ENOENT') { + return null + } + + throw err + }) + ) + }) +} + +module.exports.chownr.sync = fixOwnerSync + +function fixOwnerSync (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return + } + const { uid, gid } = inferOwner.sync(cache) + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return + } + + if (self.uid === uid && self.gid === gid) { + // No need to override if it's already what we used. + return + } + try { + chownr.sync( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ) + } catch (err) { + // only catch ENOENT, any other error is a problem. + if (err.code === 'ENOENT') { + return null + } + + throw err + } +} + +module.exports.mkdirfix = mkdirfix + +function mkdirfix (cache, p, cb) { + // we have to infer the owner _before_ making the directory, even though + // we aren't going to use the results, since the cache itself might not + // exist yet. If we mkdirp it, then our current uid/gid will be assumed + // to be correct if it creates the cache folder in the process. 
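+  // infer-owner caches lookups per path, so this pre-flight call also
+  // primes the result reused by the fixOwner() call below.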
+ return Promise.resolve(inferOwner(cache)).then(() => { + return mkdirp(p) + .then((made) => { + if (made) { + return fixOwner(cache, made).then(() => made) + } + }) + .catch((err) => { + if (err.code === 'EEXIST') { + return fixOwner(cache, p).then(() => null) + } + + throw err + }) + }) +} + +module.exports.mkdirfix.sync = mkdirfixSync + +function mkdirfixSync (cache, p) { + try { + inferOwner.sync(cache) + const made = mkdirp.sync(p) + if (made) { + fixOwnerSync(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + fixOwnerSync(cache, p) + return null + } else { + throw err + } + } +} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/move-file.js b/node_modules/pacote/node_modules/cacache/lib/util/move-file.js new file mode 100644 index 0000000000000..3739cea3df281 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/util/move-file.js @@ -0,0 +1,69 @@ +'use strict' + +const fs = require('fs') +const util = require('util') +const chmod = util.promisify(fs.chmod) +const unlink = util.promisify(fs.unlink) +const stat = util.promisify(fs.stat) +const move = require('@npmcli/move-file') +const pinflight = require('promise-inflight') + +module.exports = moveFile + +function moveFile (src, dest) { + const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || + process.platform === 'win32' + + // This isn't quite an fs.rename -- the assumption is that + // if `dest` already exists, and we get certain errors while + // trying to move it, we should just not bother. + // + // In the case of cache corruption, users will receive an + // EINTEGRITY error elsewhere, and can remove the offending + // content their own way. + // + // Note that, as the name suggests, this strictly only supports file moves. + return new Promise((resolve, reject) => { + fs.link(src, dest, (err) => { + if (err) { + if (isWindows && err.code === 'EPERM') { + // XXX This is a really weird way to handle this situation, as it + // results in the src file being deleted even though the dest + // might not exist. Since we pretty much always write files to + // deterministic locations based on content hash, this is likely + // ok (or at worst, just ends in a future cache miss). But it would + // be worth investigating at some time in the future if this is + // really what we want to do here. + return resolve() + } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { + // file already exists, so whatever + return resolve() + } else { + return reject(err) + } + } else { + return resolve() + } + }) + }) + .then(() => { + // content should never change for any reason, so make it read-only + return Promise.all([ + unlink(src), + !isWindows && chmod(dest, '0444'), + ]) + }) + .catch(() => { + return pinflight('cacache-move-file:' + dest, () => { + return stat(dest).catch((err) => { + if (err.code !== 'ENOENT') { + // Something else is wrong here. Bail bail bail + throw err + } + // file doesn't already exist! 
let's try a rename -> copy fallback + // only delete if it successfully copies + return move(src, dest) + }) + }) + }) +} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0a5a50eba3061 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,35 @@ +'use strict' + +const fs = require('@npmcli/fs') + +const fixOwner = require('./fix-owner') +const path = require('path') + +module.exports.mkdir = mktmpdir + +function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + .then(() => { + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) + }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) +} + +module.exports.fix = fixtmpdir + +function fixtmpdir (cache) { + return fixOwner(cache, path.join(cache, 'tmp')) +} diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..300cd9f9de1c4 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/lib/verify.js @@ -0,0 +1,291 @@ +'use strict' + +const util = require('util') + +const pMap = require('p-map') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const fs = require('fs') +const fsm = require('fs-minipass') +const glob = util.promisify(require('glob')) +const index = require('./entry-index') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const stat = util.promisify(fs.stat) +const truncate = util.promisify(fs.truncate) +const writeFile = util.promisify(fs.writeFile) +const readFile = util.promisify(fs.readFile) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + return steps + .reduce((promise, step, i) => { + const label = step.name + const start = new Date() + return promise.then((stats) => { + return step(cache, opts).then((s) => { + s && + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + + stats.runTime[label] = end - start + return Promise.resolve(stats) + }) + }) + }, Promise.resolve({})) + .then((stats) => { + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats + }) +} + +function markStartTime (cache, opts) { + return Promise.resolve({ startTime: new Date() }) +} + +function markEndTime (cache, opts) { + return Promise.resolve({ endTime: new Date() }) +} + +function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + return fixOwner + .mkdirfix(cache, cache) + .then(() 
=> { + // TODO - fix file permissions too + return fixOwner.chownr(cache, cache) + }) + .then(() => null) +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rimraf it. +// +function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + liveContent.add(entry.integrity.toString()) + }) + return new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }).then(() => { + const contentDir = contentPath.contentDir(cache) + return glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }).then((files) => { + return Promise.resolve({ + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + }).then((stats) => + pMap( + files, + (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + return verifyContent(f, integrity).then((info) => { + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + return stats + }) + } else { + // No entries refer to this content. We can delete. 
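Reviewer aside, not part of the patch: a sketch of how the loop above maps an on-disk content file back to the ssri string it is compared against. The sample path is invented but follows the content-v2/<algo>/<aa>/<bb>/<rest> layout.

'use strict'

// Sketch only: mirrors the split/digest/algo logic in garbageCollect().
const ssri = require('ssri')

const f = '/tmp/my-cache/content-v2/sha512/ba/da/55deadbeef' // hypothetical
const split = f.split(/[/\\]/)
const digest = split.slice(split.length - 3).join('') // 'bada55deadbeef'
const algo = split[split.length - 4] // 'sha512'

// fromHex() re-encodes the hex digest in the base64 form the index stores
console.log(ssri.fromHex(digest, algo).toString()) // sha512-<base64 digest>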
+ stats.reclaimedCount++ + return stat(f).then((s) => { + return rimraf(f).then(() => { + stats.reclaimedSize += s.size + return stats + }) + }) + } + }, + { concurrency: opts.concurrency } + ).then(() => stats) + ) + }) + }) +} + +function verifyContent (filepath, sri) { + return stat(filepath) + .then((s) => { + const contentInfo = { + size: s.size, + valid: true, + } + return ssri + .checkStream(new fsm.ReadStream(filepath), sri) + .catch((err) => { + if (err.code !== 'EINTEGRITY') { + throw err + } + + return rimraf(filepath).then(() => { + contentInfo.valid = false + }) + }) + .then(() => contentInfo) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + + throw err + }) +} + +function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + return index.ls(cache).then((entries) => { + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + return pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ).then(() => stats) + }) +} + +function rebuildBucket (cache, bucket, stats, opts) { + return truncate(bucket._path).then(() => { + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
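Reviewer aside, not part of the patch: a minimal sketch of running the whole verify pipeline defined in this file via the public API; the cache path and filter are hypothetical.

'use strict'

// Sketch only: drives the steps array at the top of this file.
const cacache = require('cacache')

cacache.verify('/tmp/my-cache', {
  // entries for which this returns false are dropped from the index and
  // their now-orphaned content is reclaimed
  filter: (entry) => !entry.key.startsWith('stale:'),
}).then((stats) => {
  console.log('verified:', stats.verifiedContent)
  console.log('reclaimed:', stats.reclaimedCount, `(${stats.reclaimedSize} bytes)`)
  console.log('total:', stats.runTime.total, 'ms')
}).catch(console.error)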
+ return bucket.reduce((promise, entry) => { + return promise.then(() => { + const content = contentPath(cache, entry.integrity) + return stat(content) + .then(() => { + return index + .insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + }) + .then(() => { + stats.totalEntries++ + }) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + return + } + throw err + }) + }) + }, Promise.resolve()) + }) +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rimraf(path.join(cache, 'tmp')) +} + +function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + try { + return writeFile(verifile, '' + +new Date()) + } finally { + fixOwner.chownr.sync(cache, verifile) + } +} + +module.exports.lastRun = lastRun + +function lastRun (cache) { + return readFile(path.join(cache, '_lastverified'), 'utf8').then( + (data) => new Date(+data) + ) +} diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/pacote/node_modules/cacache/package.json new file mode 100644 index 0000000000000..b9efa92d9f3e0 --- /dev/null +++ b/node_modules/pacote/node_modules/cacache/package.json @@ -0,0 +1,88 @@ +{ + "name": "cacache", + "version": "16.0.0", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin", + "lib" + ], + "scripts": { + "benchmarks": "node test/benchmarks", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint '**/*.js'", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "posttest": "npm run lint" + }, + "repository": "https://github.com/npm/cacache", + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + }, + "devDependencies": { + "@npmcli/template-oss": "^2.9.2", + "benchmark": "^2.1.4", + "chalk": "^4.0.0", + "require-inject": "^1.4.4", + "tacks": "^1.3.0", + "tap": "^15.0.9" + }, + "tap": { + "100": true, + "test-regex": "test/[^/]*.js" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "windowsCI": false, + "version": "2.9.2" + }, + "author": "GitHub Inc." 
+} diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index fc6ab52fa9bc0..a527602ea7cc2 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "13.0.3", + "version": "13.0.4", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -25,7 +25,7 @@ "coverage-map": "map.js" }, "devDependencies": { - "@npmcli/template-oss": "^2.7.1", + "@npmcli/template-oss": "^2.9.2", "mutate-fs": "^2.1.1", "npm-registry-mock": "^1.3.1", "tap": "^15.1.6" @@ -43,20 +43,20 @@ "@npmcli/git": "^3.0.0", "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/promise-spawn": "^1.2.0", - "@npmcli/run-script": "^3.0.0", - "cacache": "^15.3.0", + "@npmcli/run-script": "^3.0.1", + "cacache": "^16.0.0", "chownr": "^2.0.0", "fs-minipass": "^2.1.0", "infer-owner": "^1.0.4", "minipass": "^3.1.6", "mkdirp": "^1.0.4", "npm-package-arg": "^9.0.0", - "npm-packlist": "^3.0.0", + "npm-packlist": "^4.0.0", "npm-pick-manifest": "^7.0.0", - "npm-registry-fetch": "^13.0.0", + "npm-registry-fetch": "^13.0.1", "proc-log": "^2.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^4.1.1", + "read-package-json": "^4.1.2", "read-package-json-fast": "^2.0.3", "rimraf": "^3.0.2", "ssri": "^8.0.1", @@ -67,7 +67,7 @@ }, "repository": "git@github.com:npm/pacote", "templateOSS": { - "version": "2.7.1", + "version": "2.9.2", "windowsCI": false } } diff --git a/package-lock.json b/package-lock.json index db7bad188dc91..413f6b930892e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -139,7 +139,7 @@ "npm-user-validate": "^1.0.1", "npmlog": "^6.0.1", "opener": "^1.5.2", - "pacote": "^13.0.3", + "pacote": "^13.0.4", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "qrcode-terminal": "^0.12.0", @@ -5549,21 +5549,21 @@ } }, "node_modules/npm-packlist": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-3.0.0.tgz", - "integrity": "sha512-L/cbzmutAwII5glUcf2DBRNY/d0TFd4e/FnaZigJV6JD85RHZXJFGwCndjMWiiViiWSsWt3tiOLpI3ByTnIdFQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-4.0.0.tgz", + "integrity": "sha512-gL6XC/iw9YSmqArmZOGSkyy+yIZf2f7uH0p4Vmxef/irn73vd9/rDkCtvm+a9rh/QK2xGYfCAMOghM06ymzC0A==", "inBundle": true, "dependencies": { - "glob": "^7.1.6", + "glob": "^7.2.0", "ignore-walk": "^4.0.1", - "npm-bundled": "^1.1.1", + "npm-bundled": "^1.1.2", "npm-normalize-package-bin": "^1.0.1" }, "bin": { "npm-packlist": "bin/index.js" }, "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" } }, "node_modules/npm-pick-manifest": { @@ -5957,28 +5957,28 @@ } }, "node_modules/pacote": { - "version": "13.0.3", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.3.tgz", - "integrity": "sha512-8thQ06YoO01O1k5rvSpHS/XPJZucw2DPiiT1jI+ys8QaTN6ifAyxfyoABHBa8nIt/4wPdzly4GEPqshctHFoYA==", + "version": "13.0.4", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.4.tgz", + "integrity": "sha512-uhkG1ZclRmL+9O2vfrDUIDSTPIbSClCe9BUySy8IAkuF80eG51yZB+9hfStOF/O0LwVn7PcjqdGe+SJPxRp7jg==", "inBundle": true, "dependencies": { "@npmcli/git": "^3.0.0", "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/promise-spawn": "^1.2.0", - "@npmcli/run-script": "^3.0.0", - "cacache": "^15.3.0", + "@npmcli/run-script": "^3.0.1", + "cacache": "^16.0.0", "chownr": "^2.0.0", "fs-minipass": "^2.1.0", "infer-owner": "^1.0.4", "minipass": "^3.1.6", "mkdirp": "^1.0.4", "npm-package-arg": "^9.0.0", - 
"npm-packlist": "^3.0.0", + "npm-packlist": "^4.0.0", "npm-pick-manifest": "^7.0.0", - "npm-registry-fetch": "^13.0.0", + "npm-registry-fetch": "^13.0.1", "proc-log": "^2.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^4.1.1", + "read-package-json": "^4.1.2", "read-package-json-fast": "^2.0.3", "rimraf": "^3.0.2", "ssri": "^8.0.1", @@ -5991,6 +5991,35 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, + "node_modules/pacote/node_modules/cacache": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "inBundle": true, + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -10506,7 +10535,7 @@ "diff": "^5.0.0", "minimatch": "^3.0.4", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.2", + "pacote": "^13.0.4", "tar": "^6.1.0" }, "devDependencies": { @@ -10529,7 +10558,7 @@ "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.2", + "pacote": "^13.0.4", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", @@ -10671,7 +10700,7 @@ "dependencies": { "@npmcli/run-script": "^3.0.0", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.2" + "pacote": "^13.0.4" }, "devDependencies": { "@npmcli/template-oss": "^2.4.2", @@ -11360,7 +11389,7 @@ "@isaacs/string-locale-compare": "^1.1.0", "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/map-workspaces": "^2.0.0", - "@npmcli/metavuln-calculator": "3.0.1", + "@npmcli/metavuln-calculator": "^3.0.1", "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.3", @@ -14382,7 +14411,7 @@ "eslint": "^8.1.0", "minimatch": "^3.0.4", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.2", + "pacote": "13.0.4", "tap": "^15.0.9", "tar": "^6.1.0" } @@ -14399,7 +14428,7 @@ "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.2", + "pacote": "13.0.4", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", @@ -14506,7 +14535,7 @@ "@npmcli/template-oss": "^2.4.2", "nock": "^13.0.7", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.2", + "pacote": "13.0.4", "tap": "^15.0.0" } }, @@ -15165,13 +15194,13 @@ } }, "npm-packlist": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-3.0.0.tgz", - "integrity": "sha512-L/cbzmutAwII5glUcf2DBRNY/d0TFd4e/FnaZigJV6JD85RHZXJFGwCndjMWiiViiWSsWt3tiOLpI3ByTnIdFQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-4.0.0.tgz", + "integrity": "sha512-gL6XC/iw9YSmqArmZOGSkyy+yIZf2f7uH0p4Vmxef/irn73vd9/rDkCtvm+a9rh/QK2xGYfCAMOghM06ymzC0A==", "requires": { - "glob": "^7.1.6", + "glob": "^7.2.0", "ignore-walk": "^4.0.1", - "npm-bundled": "^1.1.1", + "npm-bundled": "^1.1.2", "npm-normalize-package-bin": 
"^1.0.1" } }, @@ -15464,31 +15493,58 @@ } }, "pacote": { - "version": "13.0.3", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.3.tgz", - "integrity": "sha512-8thQ06YoO01O1k5rvSpHS/XPJZucw2DPiiT1jI+ys8QaTN6ifAyxfyoABHBa8nIt/4wPdzly4GEPqshctHFoYA==", + "version": "13.0.4", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.4.tgz", + "integrity": "sha512-uhkG1ZclRmL+9O2vfrDUIDSTPIbSClCe9BUySy8IAkuF80eG51yZB+9hfStOF/O0LwVn7PcjqdGe+SJPxRp7jg==", "requires": { "@npmcli/git": "^3.0.0", "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/promise-spawn": "^1.2.0", - "@npmcli/run-script": "^3.0.0", - "cacache": "^15.3.0", + "@npmcli/run-script": "^3.0.1", + "cacache": "^16.0.0", "chownr": "^2.0.0", "fs-minipass": "^2.1.0", "infer-owner": "^1.0.4", "minipass": "^3.1.6", "mkdirp": "^1.0.4", "npm-package-arg": "^9.0.0", - "npm-packlist": "^3.0.0", + "npm-packlist": "^4.0.0", "npm-pick-manifest": "^7.0.0", - "npm-registry-fetch": "^13.0.0", + "npm-registry-fetch": "^13.0.1", "proc-log": "^2.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^4.1.1", + "read-package-json": "^4.1.2", "read-package-json-fast": "^2.0.3", "rimraf": "^3.0.2", "ssri": "^8.0.1", "tar": "^6.1.11" + }, + "dependencies": { + "cacache": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "requires": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + } + } } }, "parent-module": { diff --git a/package.json b/package.json index d7e2f8b03fa06..41df58368a863 100644 --- a/package.json +++ b/package.json @@ -107,7 +107,7 @@ "npm-user-validate": "^1.0.1", "npmlog": "^6.0.1", "opener": "^1.5.2", - "pacote": "^13.0.3", + "pacote": "^13.0.4", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "qrcode-terminal": "^0.12.0", diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json index d1f9a92d289be..37a98a20825c4 100644 --- a/workspaces/libnpmdiff/package.json +++ b/workspaces/libnpmdiff/package.json @@ -59,7 +59,7 @@ "diff": "^5.0.0", "minimatch": "^3.0.4", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.2", + "pacote": "^13.0.4", "tar": "^6.1.0" }, "templateOSS": { diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index 4f607099b10e3..e0705a2742078 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -62,7 +62,7 @@ "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.2", + "pacote": "^13.0.4", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json index 9f7e56a1ad4dd..690db7cbbac7d 100644 --- a/workspaces/libnpmpack/package.json +++ b/workspaces/libnpmpack/package.json @@ -42,7 +42,7 @@ "dependencies": { "@npmcli/run-script": "^3.0.0", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.2" + "pacote": "^13.0.4" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" From 
f548b4a1781e47ccf356f741dcc29c50621a0c17 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 14 Mar 2022 13:52:55 -0700 Subject: [PATCH 03/11] deps: make-fetch-happen@10.0.6 --- .../node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 ++ .../node_modules/cacache/lib/content/read.js | 259 +++++++++++ .../node_modules/cacache/lib/content/rm.js | 20 + .../node_modules/cacache/lib/content/write.js | 194 +++++++++ .../node_modules/cacache/lib/entry-index.js | 412 ++++++++++++++++++ .../node_modules/cacache/lib/get.js | 251 +++++++++++ .../node_modules/cacache/lib/index.js | 45 ++ .../node_modules/cacache/lib/memoization.js | 74 ++++ .../node_modules/cacache/lib/put.js | 87 ++++ .../node_modules/cacache/lib/rm.js | 31 ++ .../node_modules/cacache/lib/util/disposer.js | 31 ++ .../cacache/lib/util/fix-owner.js | 148 +++++++ .../cacache/lib/util/hash-to-segments.js | 7 + .../cacache/lib/util/move-file.js | 69 +++ .../node_modules/cacache/lib/util/tmp.js | 35 ++ .../node_modules/cacache/lib/verify.js | 291 +++++++++++++ .../cacache/node_modules/lru-cache/LICENSE | 15 + .../cacache/node_modules/lru-cache/index.js | 334 ++++++++++++++ .../node_modules/lru-cache/package.json | 34 ++ .../node_modules/cacache/package.json | 88 ++++ .../node_modules/lru-cache/bundle/main.js | 1 - .../node_modules/lru-cache/bundle/main.mjs | 1 - .../node_modules/lru-cache/index.js | 36 +- .../node_modules/lru-cache/package.json | 19 +- node_modules/make-fetch-happen/package.json | 10 +- node_modules/minipass-fetch/lib/index.js | 8 + node_modules/minipass-fetch/package.json | 7 +- package-lock.json | 132 ++++-- package.json | 2 +- 30 files changed, 2631 insertions(+), 55 deletions(-) create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/get.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/put.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json create mode 100644 
node_modules/make-fetch-happen/node_modules/cacache/package.json delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..8bffb2af83cab --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js @@ -0,0 +1,259 @@ +'use strict' + +const util = require('util') + +const fs = require('fs') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +const lstat = util.promisify(fs.lstat) +const readFile = util.promisify(fs.readFile) + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +function read (cache, integrity, opts = {}) { + const { size } = opts + return withContentSri(cache, integrity, (cpath, sri) => { + // get size + return lstat(cpath).then(stat => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } 
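+    // (files over the 64 MiB MAX_SINGLE_READ_SIZE stream through
+    //  readPipeline above; smaller ones are read in one shot below and
+    //  verified with ssri.checkData)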
+ + return readFile(cpath, null).then((data) => { + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data + }) + }) +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.sync = readSync + +function readSync (cache, integrity, opts = {}) { + const { size } = opts + return withContentSriSync(cache, integrity, (cpath, sri) => { + const data = fs.readFileSync(cpath) + if (typeof size === 'number' && size !== data.length) { + throw sizeError(size, data.length) + } + + if (ssri.checkData(data, sri)) { + return data + } + + throw integrityError(sri, cpath) + }) +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + withContentSri(cache, integrity, (cpath, sri) => { + // just lstat to ensure it exists + return lstat(cpath).then((stat) => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + readPipeline(cpath, stat.size, sri, stream) + }, er => stream.emit('error', er)) + + return stream +} + +let copyFile +if (fs.copyFile) { + module.exports.copy = copy + module.exports.copy.sync = copySync + copyFile = util.promisify(fs.copyFile) +} + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return copyFile(cpath, dest) + }) +} + +function copySync (cache, integrity, dest) { + return withContentSriSync(cache, integrity, (cpath, sri) => { + return fs.copyFileSync(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +function hasContent (cache, integrity) { + if (!integrity) { + return Promise.resolve(false) + } + + return withContentSri(cache, integrity, (cpath, sri) => { + return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) + }).catch((err) => { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + }) +} + +module.exports.hasContent.sync = hasContentSync + +function hasContentSync (cache, integrity) { + if (!integrity) { + return false + } + + return withContentSriSync(cache, integrity, (cpath, sri) => { + try { + const stat = fs.lstatSync(cpath) + return { size: stat.size, sri, stat } + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } + }) +} + +function withContentSri (cache, integrity, fn) { + const tryFn = () => { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
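+    // (e.g. a hypothetical 'sha512-AAA sha512-BBB' parses to two digests
+    //  under the sha512 algorithm; each candidate content path is tried
+    //  in turn)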
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + return Promise + .all(digests.map((meta) => { + return withContentSri(cache, meta, fn) + .catch((err) => { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + }) + })) + .then((results) => { + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + }) + } + } + + return new Promise((resolve, reject) => { + try { + tryFn() + .then(resolve) + .catch(reject) + } catch (err) { + reject(err) + } + }) +} + +function withContentSriSync (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + let lastErr = null + for (const meta of digests) { + try { + return withContentSriSync(cache, meta, fn) + } catch (err) { + lastErr = err + } + } + throw lastErr + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..50612364e9b48 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,20 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const { hasContent } = require('./read') +const rimraf = util.promisify(require('rimraf')) + +module.exports = rm + +function rm (cache, integrity) { + return hasContent(cache, integrity).then((content) => { + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + return rimraf(contentPath(cache, content.sri)).then(() => true) + } else { + return false + } + }) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..a71e81ad5e150 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js @@ -0,0 +1,194 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const fixOwner = require('../util/fix-owner') +const fs = require('fs') +const moveFile = require('../util/move-file') +const Minipass = 
require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const { disposer } = require('./../util/disposer') +const fsm = require('fs-minipass') + +const writeFile = util.promisify(fs.writeFile) + +module.exports = write + +function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + if (algorithms && algorithms.length > 1) { + throw new Error('opts.algorithms only supports a single algorithm for now') + } + + if (typeof size === 'number' && data.length !== size) { + return Promise.reject(sizeError(size, data.length)) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + return Promise.reject(checksumError(integrity, sri)) + } + + return disposer(makeTmp(cache, opts), makeTmpDisposer, + (tmp) => { + return writeFile(tmp.target, data, { flag: 'wx' }) + .then(() => moveToDestination(tmp, cache, sri, opts)) + }) + .then(() => ({ integrity: sri, size: data.length })) +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +function handleContent (inputStream, cache, opts) { + return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { + return pipeToTmp(inputStream, cache, tmp.target, opts) + .then((res) => { + return moveToDestination( + tmp, + cache, + res.integrity, + opts + ).then(() => res) + }) + }) +} + +function pipeToTmp (inputStream, cache, tmpTarget, opts) { + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + // NB: this can throw if the hashStream has a problem with + // it, and the data is fully written. but pipeToTmp is only + // called in promisory contexts where that is handled. 
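+  // (if any stream in the chain errors, pipeline.promise() rejects, the
+  //  partially written tmp file is rimraf'd below, and the error rethrown)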
+ const pipeline = new Pipeline( + inputStream, + hashStream, + outStream + ) + + return pipeline.promise() + .then(() => ({ integrity, size })) + .catch(er => rimraf(tmpTarget).then(() => { + throw er + })) +} + +function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ + target: tmpTarget, + moved: false, + })) +} + +function makeTmpDisposer (tmp) { + if (tmp.moved) { + return Promise.resolve() + } + + return rimraf(tmp.target) +} + +function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + + return fixOwner + .mkdirfix(cache, destDir) + .then(() => { + return moveFile(tmp.target, destination) + }) + .then(() => { + tmp.moved = true + return fixOwner.chownr(cache, destination) + }) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..426778b850963 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,412 @@ +'use strict' + +const util = require('util') +const crypto = require('crypto') +const fs = require('fs') +const Minipass = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const { disposer } = require('./util/disposer') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const moveFile = require('@npmcli/move-file') +const _rimraf = require('rimraf') +const rimraf = util.promisify(_rimraf) +rimraf.sync = _rimraf.sync + +const appendFile = util.promisify(fs.appendFile) +const readFile = util.promisify(fs.readFile) +const readdir = util.promisify(fs.readdir) +const writeFile = util.promisify(fs.writeFile) + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. 
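+    // (a null integrity is how deletes are recorded: del() below appends
+    //  insert(cache, key, null) as a tombstone)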
+ // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fixOwner.mkdirfix(cache, path.dirname(target)) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rimraf(tmp.target) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + try { + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + } + + // write the file atomically + await disposer(setup(), teardown, write) + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +function insert (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + return fixOwner + .mkdirfix(cache, path.dirname(bucket)) + .then(() => { + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + }) + .then(() => fixOwner.chownr(cache, bucket)) + .catch((err) => { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + // There's a class of race conditions that happen when things get deleted + // during fixOwner, or between the two mkdirfix/chownr calls. + // + // It's perfectly fine to just not bother in those cases and lie + // that the index entry was written. Because it's a cache. 
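+      // (the .then below still returns the formatted entry, so callers
+      //  can't tell the write was skipped)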
+ }) + .then(() => { + return formatEntry(cache, entry) + }) +} + +module.exports.insert.sync = insertSync + +function insertSync (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + try { + fixOwner.chownr.sync(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +function find (cache, key) { + const bucket = bucketPath(cache, key) + return bucketEntries(bucket) + .then((entries) => { + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + }) +} + +module.exports.find.sync = findSync + +function findSync (cache, key) { + const bucket = bucketPath(cache, key) + try { + return bucketEntriesSync(bucket).reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf(bucket) +} + +module.exports.delete.sync = delSync + +function delSync (cache, key, opts = {}) { + if (!opts.removeFully) { + return insertSync(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf.sync(bucket) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + readdirOrEmpty(indexDir).then(buckets => Promise.all( + buckets.map(bucket => { + const bucketPath = path.join(indexDir, bucket) + return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( + subbuckets.map(subbucket => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + return readdirOrEmpty(subbucketPath).then(entries => Promise.all( + entries.map(entry => { + const entryPath = path.join(subbucketPath, entry) + return bucketEntries(entryPath).then(entries => + // using a Map here prevents duplicate keys from + // showing up twice, I guess? 
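+                // (later lines in a bucket win: Map.set overwrites earlier
+                //  entries with the same key, so only the newest survives)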
+ entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + ).then(reduced => { + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + }).catch(err => { + if (err.code === 'ENOENT') { + return undefined + } + throw err + }) + }) + )) + }) + )) + }) + )) + .then( + () => stream.end(), + err => stream.emit('error', err) + ) + + return stream +} + +module.exports.ls = ls + +function ls (cache) { + return lsStream(cache).collect().then(entries => + entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) + ) +} + +module.exports.bucketEntries = bucketEntries + +function bucketEntries (bucket, filter) { + return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) +} + +module.exports.bucketEntries.sync = bucketEntriesSync + +function bucketEntriesSync (bucket, filter) { + const data = fs.readFileSync(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (e) { + // Entry is corrupted! + return + } + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..d9d4bf4c6416f --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js @@ -0,0 +1,251 @@ +'use strict' + +const Collect = require('minipass-collect') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const fs = require('fs') +const util = require('util') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +const writeFile = util.promisify(fs.writeFile) + +function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve({ + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + }) + } + + return index.find(cache, key, opts).then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + return read(cache, entry.integrity, { integrity, size }).then((data) => { + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) +} +module.exports = getData + +function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized) + } + + return read(cache, key, { integrity, size }).then((res) => { + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res + }) +} +module.exports.byDigest = getDataByDigest + +function getDataSync (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + const entry = index.find.sync(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = read.sync(cache, entry.integrity, { + integrity: integrity, + size: size, + }) + const res = { + metadata: entry.metadata, + data: data, + size: entry.size, + integrity: entry.integrity, + } + if (memoize) { + memo.put(cache, entry, res.data, opts) + } + + return res +} + +module.exports.sync = getDataSync + +function getDataByDigestSync (cache, digest, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, digest, opts) + + if (memoized && memoize !== false) { + return memoized + } + + const res = read.sync(cache, digest, { + integrity: integrity, + size: size, + }) + if (memoize) { + memo.put.byDigest(cache, digest, res, opts) + } + + return res +} +module.exports.sync.byDigest = getDataByDigestSync + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && 
cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + index + .find(cache, key) + .then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + }) + .catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +function copy (cache, key, dest, opts = {}) { + if (read.copy) { + return index.find(cache, key, opts).then((entry) => { + if (!entry) { + throw new index.NotFoundError(cache, key) + } + return read.copy(cache, entry.integrity, dest, opts) + .then(() => { + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) + } + + return getData(cache, key, opts).then((res) => { + return writeFile(dest, res.data).then(() => { + return { + metadata: res.metadata, + size: res.size, + integrity: res.integrity, + } + }) + }) +} +module.exports.copy = copy + +function copyByDigest (cache, key, dest, opts = {}) { + if (read.copy) { + return read.copy(cache, key, dest, opts).then(() => key) + } + + return getDataByDigest(cache, key, opts).then((res) => { + return writeFile(dest, res).then(() => key) + }) +} +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..1c56be68dd8fd --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js @@ -0,0 +1,45 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = 
require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.sync = get.sync +module.exports.get.sync.byDigest = get.sync.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent +module.exports.get.hasContent.sync = get.hasContent.sync + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..e1b13dd5fd528 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js @@ -0,0 +1,74 @@ +'use strict' + +const LRU = require('lru-cache') + +const MAX_SIZE = 50 * 1024 * 1024 // 50MB +const MAX_AGE = 3 * 60 * 1000 + +const MEMOIZED = new LRU({ + max: MAX_SIZE, + maxAge: MAX_AGE, + length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.reset() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..d6904fa301272 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js @@ -0,0 +1,87 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = 
require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + return write(cache, data, opts).then((res) => { + return index + .insert(cache, key, res.integrity, { ...opts, size: res.size }) + .then((entry) => { + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity + }) + }) +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + flush () { + return index + .insert(cache, key, integrity, { ...opts, size }) + .then((entry) => { + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + + if (integrity) { + pipeline.emit('integrity', integrity) + } + + if (size) { + pipeline.emit('size', size) + } + }) + }, + })) + + return pipeline +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..5f00071770b8d --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const util = require('util') + +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +function all (cache) { + memo.clearMemoized() + return rimraf(path.join(cache, '*(content-*|index-*)')) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js new file mode 100644 index 0000000000000..52d7d3edda7d5 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js @@ -0,0 +1,31 @@ +'use strict' + +module.exports.disposer = disposer + +function disposer (creatorFn, disposerFn, fn) { + const runDisposer = (resource, result, shouldThrow = false) => { + return disposerFn(resource) + .then( + // disposer resolved, do something with original fn's promise + () => { + if (shouldThrow) { + throw result + } + + return result + }, + // Disposer fn failed, crash process + (err) => { + throw err + // Or process.exit? 
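+        // (in practice `throw err` rejects the promise returned by
+        // disposer(), surfacing the cleanup failure to the caller)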
+ }) + } + + return creatorFn + .then((resource) => { + // fn(resource) can throw, so wrap in a promise here + return Promise.resolve().then(() => fn(resource)) + .then((result) => runDisposer(resource, result)) + .catch((err) => runDisposer(resource, err, true)) + }) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js new file mode 100644 index 0000000000000..bc14def4e405c --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js @@ -0,0 +1,148 @@ +'use strict' + +const util = require('util') + +const chownr = util.promisify(require('chownr')) +const mkdirp = require('mkdirp') +const inflight = require('promise-inflight') +const inferOwner = require('infer-owner') + +// Memoize getuid()/getgid() calls. +// patch process.setuid/setgid to invalidate cached value on change +const self = { uid: null, gid: null } +const getSelf = () => { + if (typeof self.uid !== 'number') { + self.uid = process.getuid() + const setuid = process.setuid + process.setuid = (uid) => { + self.uid = null + process.setuid = setuid + return process.setuid(uid) + } + } + if (typeof self.gid !== 'number') { + self.gid = process.getgid() + const setgid = process.setgid + process.setgid = (gid) => { + self.gid = null + process.setgid = setgid + return process.setgid(gid) + } + } +} + +module.exports.chownr = fixOwner + +function fixOwner (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return Promise.resolve() + } + + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return Promise.resolve() + } + + return Promise.resolve(inferOwner(cache)).then((owner) => { + const { uid, gid } = owner + + // No need to override if it's already what we used. + if (self.uid === uid && self.gid === gid) { + return + } + + return inflight('fixOwner: fixing ownership on ' + filepath, () => + chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ).catch((err) => { + if (err.code === 'ENOENT') { + return null + } + + throw err + }) + ) + }) +} + +module.exports.chownr.sync = fixOwnerSync + +function fixOwnerSync (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return + } + const { uid, gid } = inferOwner.sync(cache) + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return + } + + if (self.uid === uid && self.gid === gid) { + // No need to override if it's already what we used. + return + } + try { + chownr.sync( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ) + } catch (err) { + // only catch ENOENT, any other error is a problem. + if (err.code === 'ENOENT') { + return null + } + + throw err + } +} + +module.exports.mkdirfix = mkdirfix + +function mkdirfix (cache, p, cb) { + // we have to infer the owner _before_ making the directory, even though + // we aren't going to use the results, since the cache itself might not + // exist yet. If we mkdirp it, then our current uid/gid will be assumed + // to be correct if it creates the cache folder in the process. 
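+  // Note: mkdirp() resolves to the first directory it actually created,
+  // or undefined if everything already existed, which is why ownership
+  // is only fixed when `made` is truthy below.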
+ return Promise.resolve(inferOwner(cache)).then(() => { + return mkdirp(p) + .then((made) => { + if (made) { + return fixOwner(cache, made).then(() => made) + } + }) + .catch((err) => { + if (err.code === 'EEXIST') { + return fixOwner(cache, p).then(() => null) + } + + throw err + }) + }) +} + +module.exports.mkdirfix.sync = mkdirfixSync + +function mkdirfixSync (cache, p) { + try { + inferOwner.sync(cache) + const made = mkdirp.sync(p) + if (made) { + fixOwnerSync(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + fixOwnerSync(cache, p) + return null + } else { + throw err + } + } +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js new file mode 100644 index 0000000000000..3739cea3df281 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js @@ -0,0 +1,69 @@ +'use strict' + +const fs = require('fs') +const util = require('util') +const chmod = util.promisify(fs.chmod) +const unlink = util.promisify(fs.unlink) +const stat = util.promisify(fs.stat) +const move = require('@npmcli/move-file') +const pinflight = require('promise-inflight') + +module.exports = moveFile + +function moveFile (src, dest) { + const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || + process.platform === 'win32' + + // This isn't quite an fs.rename -- the assumption is that + // if `dest` already exists, and we get certain errors while + // trying to move it, we should just not bother. + // + // In the case of cache corruption, users will receive an + // EINTEGRITY error elsewhere, and can remove the offending + // content their own way. + // + // Note that, as the name suggests, this strictly only supports file moves. + return new Promise((resolve, reject) => { + fs.link(src, dest, (err) => { + if (err) { + if (isWindows && err.code === 'EPERM') { + // XXX This is a really weird way to handle this situation, as it + // results in the src file being deleted even though the dest + // might not exist. Since we pretty much always write files to + // deterministic locations based on content hash, this is likely + // ok (or at worst, just ends in a future cache miss). But it would + // be worth investigating at some time in the future if this is + // really what we want to do here. + return resolve() + } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { + // file already exists, so whatever + return resolve() + } else { + return reject(err) + } + } else { + return resolve() + } + }) + }) + .then(() => { + // content should never change for any reason, so make it read-only + return Promise.all([ + unlink(src), + !isWindows && chmod(dest, '0444'), + ]) + }) + .catch(() => { + return pinflight('cacache-move-file:' + dest, () => { + return stat(dest).catch((err) => { + if (err.code !== 'ENOENT') { + // Something else is wrong here. Bail bail bail + throw err + } + // file doesn't already exist! 
let's try a rename -> copy fallback + // only delete if it successfully copies + return move(src, dest) + }) + }) + }) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0a5a50eba3061 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,35 @@ +'use strict' + +const fs = require('@npmcli/fs') + +const fixOwner = require('./fix-owner') +const path = require('path') + +module.exports.mkdir = mktmpdir + +function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + .then(() => { + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) + }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) +} + +module.exports.fix = fixtmpdir + +function fixtmpdir (cache) { + return fixOwner(cache, path.join(cache, 'tmp')) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..300cd9f9de1c4 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js @@ -0,0 +1,291 @@ +'use strict' + +const util = require('util') + +const pMap = require('p-map') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const fs = require('fs') +const fsm = require('fs-minipass') +const glob = util.promisify(require('glob')) +const index = require('./entry-index') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const stat = util.promisify(fs.stat) +const truncate = util.promisify(fs.truncate) +const writeFile = util.promisify(fs.writeFile) +const readFile = util.promisify(fs.readFile) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + return steps + .reduce((promise, step, i) => { + const label = step.name + const start = new Date() + return promise.then((stats) => { + return step(cache, opts).then((s) => { + s && + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + + stats.runTime[label] = end - start + return Promise.resolve(stats) + }) + }) + }, Promise.resolve({})) + .then((stats) => { + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats + }) +} + +function markStartTime (cache, opts) { + return Promise.resolve({ startTime: new Date() }) +} + +function markEndTime (cache, opts) { + return Promise.resolve({ endTime: new Date() }) +} + +function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache 
permissions') + return fixOwner + .mkdirfix(cache, cache) + .then(() => { + // TODO - fix file permissions too + return fixOwner.chownr(cache, cache) + }) + .then(() => null) +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rimraf it. +// +function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + liveContent.add(entry.integrity.toString()) + }) + return new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }).then(() => { + const contentDir = contentPath.contentDir(cache) + return glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }).then((files) => { + return Promise.resolve({ + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + }).then((stats) => + pMap( + files, + (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + return verifyContent(f, integrity).then((info) => { + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + return stats + }) + } else { + // No entries refer to this content. We can delete. 
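+                // (stat the file before rimraf so its size can still be
+                // added to reclaimedSize once it is gone)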
+ stats.reclaimedCount++ + return stat(f).then((s) => { + return rimraf(f).then(() => { + stats.reclaimedSize += s.size + return stats + }) + }) + } + }, + { concurrency: opts.concurrency } + ).then(() => stats) + ) + }) + }) +} + +function verifyContent (filepath, sri) { + return stat(filepath) + .then((s) => { + const contentInfo = { + size: s.size, + valid: true, + } + return ssri + .checkStream(new fsm.ReadStream(filepath), sri) + .catch((err) => { + if (err.code !== 'EINTEGRITY') { + throw err + } + + return rimraf(filepath).then(() => { + contentInfo.valid = false + }) + }) + .then(() => contentInfo) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + + throw err + }) +} + +function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + return index.ls(cache).then((entries) => { + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + return pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ).then(() => stats) + }) +} + +function rebuildBucket (cache, bucket, stats, opts) { + return truncate(bucket._path).then(() => { + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. + return bucket.reduce((promise, entry) => { + return promise.then(() => { + const content = contentPath(cache, entry.integrity) + return stat(content) + .then(() => { + return index + .insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + }) + .then(() => { + stats.totalEntries++ + }) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + return + } + throw err + }) + }) + }, Promise.resolve()) + }) +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rimraf(path.join(cache, 'tmp')) +} + +function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + try { + return writeFile(verifile, '' + +new Date()) + } finally { + fixOwner.chownr.sync(cache, verifile) + } +} + +module.exports.lastRun = lastRun + +function lastRun (cache) { + return readFile(path.join(cache, '_lastverified'), 'utf8').then( + (data) => new Date(+data) + ) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..19129e315fe59 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..573b6b85b9779 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. 
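+  // (every entry is re-measured with the new calculator, the total
+  // length is recomputed, and the cache is trimmed if it now exceeds max)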
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. 
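+    // (an entry larger than max is never stored: it is disposed of
+    // immediately and set() returns false)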
+ if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..43b7502c3e7c7 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "6.0.0", + "author": "Isaac Z. 
Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/package.json new file mode 100644 index 0000000000000..b9efa92d9f3e0 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/package.json @@ -0,0 +1,88 @@ +{ + "name": "cacache", + "version": "16.0.0", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin", + "lib" + ], + "scripts": { + "benchmarks": "node test/benchmarks", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint '**/*.js'", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "posttest": "npm run lint" + }, + "repository": "https://github.com/npm/cacache", + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + }, + "devDependencies": { + "@npmcli/template-oss": "^2.9.2", + "benchmark": "^2.1.4", + "chalk": "^4.0.0", + "require-inject": "^1.4.4", + "tacks": "^1.3.0", + "tap": "^15.0.9" + }, + "tap": { + "100": true, + "test-regex": "test/[^/]*.js" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "windowsCI": false, + "version": "2.9.2" + }, + "author": "GitHub Inc." 
+} diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js deleted file mode 100644 index b9c8be2ffa2e4..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js +++ /dev/null @@ -1 +0,0 @@ -(()=>{var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>"object"==typeof process&&process&&!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. Please use ${e} instead.`,"DeprecationWarning",t,h)},a=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),n=t=>a(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=n(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:S,maxAge:g,stale:m}=t instanceof d?{}:t;if(!a(i))throw new TypeError("max option must be an integer");const x=n(i);if(!x)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||S,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new x(i),this.prev=new x(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!a(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!m,this.updateAgeOnGet=!!o,this.ttlResolution=a(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||g||0,this.ttl){if(!a(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.initializeTTLTracking()}m&&e("stale","allowStale"),g&&e("maxAge","ttl"),S&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new 
r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=a(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes({allowStale:t=this.allowStale}={}){if(this.size)for(let i=this.tail;!t&&this.isStale(i)||(yield i),i!==this.head;i=this.prev[i]);}*rindexes({allowStale:t=this.allowStale}={}){if(this.size)for(let i=this.head;!t&&this.isStale(i)||(yield i),i!==this.tail;i=this.next[i]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;for(const i of this.rindexes({allowStale:!0}))this.isStale(i)&&(this.delete(this.keyList[i]),t=!0);return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let a=0===this.size?void 0:this.keyMap.get(t);if(void 0===a)a=this.newIndex(),this.keyList[a]=t,this.valList[a]=i,this.keyMap.set(t,a),this.next[this.tail]=a,this.prev[a]=this.tail,this.tail=a,this.size++,this.addItemSize(a,i,t,h,l),o=!1;else{const s=this.valList[a];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(a),this.valList[a]=i,this.addItemSize(a,i,t,h,l)),this.moveToTail(a)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(a,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return 
this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes({allowStale:!0}))this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of this.rindexes({allowStale:!0}))this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={},s=function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10);module.exports=s})(); \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs deleted file mode 100644 index 3a4d674c07a41..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs +++ /dev/null @@ -1 +0,0 @@ -var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. 
Please use ${e} instead.`,"DeprecationWarning",t,h)},n=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),a=t=>n(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=a(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:g,maxAge:m,stale:S}=t instanceof d?{}:t;if(!n(i))throw new TypeError("max option must be an integer");const L=a(i);if(!L)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||g,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new L(i),this.prev=new L(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!n(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!S,this.updateAgeOnGet=!!o,this.ttlResolution=n(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||m||0,this.ttl){if(!n(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.initializeTTLTracking()}S&&e("stale","allowStale"),m&&e("maxAge","ttl"),g&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=n(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes(){if(this.size)for(let t=this.tail;this.isStale(t)||(yield t),t!==this.head;t=this.prev[t]);}*rindexes(){if(this.size)for(let t=this.head;this.isStale(t)||(yield t),t!==this.tail;t=this.next[t]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const 
s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;if(this.size)for(let i=this.head;;i=this.next[i]){const s=i===this.tail;if(this.isStale(i)&&(this.delete(this.keyList[i]),t=!0),s)break}return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let n=0===this.size?void 0:this.keyMap.get(t);if(void 0===n)n=this.newIndex(),this.keyList[n]=t,this.valList[n]=i,this.keyMap.set(t,n),this.next[this.tail]=n,this.prev[n]=this.tail,this.tail=n,this.size++,this.addItemSize(n,i,t,h,l),o=!1;else{const s=this.valList[n];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(n),this.valList[n]=i,this.addItemSize(n,i,t,h,l)),this.moveToTail(n)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(n,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes())this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of 
this.rindexes())this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={};!function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10); \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/index.js index 77adaa6451a0c..e37f51616452e 100644 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js +++ b/node_modules/make-fetch-happen/node_modules/lru-cache/index.js @@ -243,46 +243,78 @@ class LRUCache { *indexes ({ allowStale = this.allowStale } = {}) { if (this.size) { - for (let i = this.tail; true; i = this.prev[i]) { + for (let i = this.tail, j; true; ) { + if (!this.isValidIndex(i)) { + break + } + j = i === this.head if (allowStale || !this.isStale(i)) { yield i } if (i === this.head) { break + } else { + i = this.prev[i] } } } } + *rindexes ({ allowStale = this.allowStale } = {}) { if (this.size) { - for (let i = this.head; true; i = this.next[i]) { + for (let i = this.head, j; true; ) { + if (!this.isValidIndex(i)) { + break + } if (allowStale || !this.isStale(i)) { yield i } + // either the tail now, or WAS the tail, and deleted if (i === this.tail) { break + } else { + i = this.next[i] } } } } + isValidIndex (index) { + return this.keyMap.get(this.keyList[index]) === index + } + *entries () { for (const i of this.indexes()) { yield [this.keyList[i], this.valList[i]] } } + *rentries () { + for (const i of this.rindexes()) { + yield [this.keyList[i], this.valList[i]] + } + } *keys () { for (const i of this.indexes()) { yield this.keyList[i] } } + *rkeys () { + for (const i of this.rindexes()) { + yield this.keyList[i] + } + } *values () { for (const i of this.indexes()) { yield this.valList[i] } } + *rvalues () { + for (const i of this.rindexes()) { + yield this.valList[i] + } + } [Symbol.iterator] () { return this.entries() diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json index 313bfcbbfd206..a62f74c2b648a 100644 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json +++ b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.4.2", + "version": "7.5.1", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -9,9 +9,7 @@ "cache" ], "scripts": { - "prepare": "webpack-cli -o bundle ./index.js --node-env production", - "build": "npm run prepare", - "presize": "npm run prepare", + "build": "", "test": "tap", "snap": "tap", "size": "size-limit", @@ -20,23 +18,16 @@ "prepublishOnly": "git push origin --follow-tags" }, "main": "index.js", - "browser": "./bundle/main.js", - "exports": { - ".": "./index.js", - "./browser": "./bundle/main.js" - }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", "benchmark": "^2.1.4", "size-limit": "^7.0.8", - "tap": "^15.1.6", - "webpack-cli": "^4.9.2" + "tap": "^15.1.6" }, "license": "ISC", "files": [ - "index.js", - "bundle" + "index.js" ], "engines": { "node": ">=12" @@ -46,7 +37,7 @@ }, "size-limit": [ { - "path": "./bundle/main.js" + "path": "./index.js" } ] } diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index c8c5842b33a9f..e52131b8a8e01 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "10.0.5", + "version": "10.0.6", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -34,15 +34,15 @@ "license": "ISC", "dependencies": { "agentkeepalive": "^4.2.1", - "cacache": "^15.3.0", + "cacache": "^16.0.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.4.1", + "lru-cache": "^7.5.1", "minipass": "^3.1.6", "minipass-collect": "^1.0.2", - "minipass-fetch": "^2.0.2", + "minipass-fetch": "^2.0.3", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", @@ -52,7 +52,7 @@ }, "devDependencies": { "@npmcli/template-oss": "^2.9.2", - "eslint": "^8.10.0", + "eslint": "^8.11.0", "mkdirp": "^1.0.4", "nock": "^13.2.4", "rimraf": "^3.0.2", diff --git a/node_modules/minipass-fetch/lib/index.js b/node_modules/minipass-fetch/lib/index.js index 473630e1a5857..22257a417aff8 100644 --- a/node_modules/minipass-fetch/lib/index.js +++ b/node_modules/minipass-fetch/lib/index.js @@ -204,6 +204,14 @@ const fetch = async (url, opts) => { timeout: request.timeout, } + // if the redirect is to a new hostname, strip the authorization and cookie headers + const parsedOriginal = new URL(request.url) + const parsedRedirect = new URL(locationURL) + if (parsedOriginal.hostname !== parsedRedirect.hostname) { + requestOpts.headers.delete('authorization') + requestOpts.headers.delete('cookie') + } + // HTTP-redirect fetch step 11 if (res.statusCode === 303 || ( (res.statusCode === 301 || res.statusCode === 302) && diff --git a/node_modules/minipass-fetch/package.json b/node_modules/minipass-fetch/package.json index 47e32dad6df8b..68e1ce134cd26 100644 --- a/node_modules/minipass-fetch/package.json +++ b/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "2.0.2", + "version": "2.0.3", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", @@ -22,11 +22,12 @@ "check-coverage": true }, "devDependencies": { - "@npmcli/template-oss": "^2.8.1", + "@npmcli/template-oss": "^2.9.2", "@ungap/url-search-params": "^0.2.2", "abort-controller": "^3.0.0", "abortcontroller-polyfill": "~1.7.3", "form-data": "^4.0.0", + "nock": "^13.2.4", "parted": "^0.1.1", 
"string-to-arraybuffer": "^1.0.2", "tap": "^15.1.6" @@ -58,6 +59,6 @@ }, "author": "GitHub Inc.", "templateOSS": { - "version": "2.8.1" + "version": "2.9.2" } } diff --git a/package-lock.json b/package-lock.json index 413f6b930892e..fec5f4c6c13cc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -122,7 +122,7 @@ "libnpmsearch": "^5.0.1", "libnpmteam": "^4.0.1", "libnpmversion": "^3.0.1", - "make-fetch-happen": "^10.0.5", + "make-fetch-happen": "^10.0.6", "minipass": "^3.1.6", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -5014,21 +5014,21 @@ "peer": true }, "node_modules/make-fetch-happen": { - "version": "10.0.5", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.5.tgz", - "integrity": "sha512-0JQ0daMRDFEv14DelmcFlprdhSDNG7WEgInTjBeWYWZ78W0jfDqygZdPLhcrQ4s/G8skNhBrS4fiF6xA+YlFjQ==", + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.6.tgz", + "integrity": "sha512-4Gfh6lV3TLXmj7qz79hBFuvVqjYSMW6v2+sxtdX4LFQU0rK3V/txRjE0DoZb7X0IF3t9f8NO3CxPSWlvdckhVA==", "inBundle": true, "dependencies": { "agentkeepalive": "^4.2.1", - "cacache": "^15.3.0", + "cacache": "^16.0.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.4.1", + "lru-cache": "^7.5.1", "minipass": "^3.1.6", "minipass-collect": "^1.0.2", - "minipass-fetch": "^2.0.2", + "minipass-fetch": "^2.0.3", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", @@ -5040,10 +5040,51 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, + "node_modules/make-fetch-happen/node_modules/cacache": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "inBundle": true, + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "inBundle": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/make-fetch-happen/node_modules/lru-cache": { - "version": "7.4.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.4.2.tgz", - "integrity": "sha512-Xs3+hFPDSKQmL05Gs6NhvAADol1u9TmLoNoE03ZjszX6a5iYIO3rPUM4jIjoBUJeTaWEBMozjjmV70gvdRfIdw==", + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", "inBundle": true, "engines": { "node": ">=12" @@ -5218,9 +5259,9 @@ } }, "node_modules/minipass-fetch": { - "version": "2.0.2", - "resolved": 
"https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.0.2.tgz", - "integrity": "sha512-M63u5yWX0yxY1C3DcLVY1xWai0pNM3qa1xCMXFgdejY5F/NTmyzNVHGcBxKerX51lssqxwWWTjpg/ZPuD39gOQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.0.3.tgz", + "integrity": "sha512-VA+eiiUtaIvpQJXISwE3OiMvQwAWrgKb97F0aXlCS1Ahikr8fEQq8m3Hf7Kv9KT3nokuHigJKsDMB6atU04olQ==", "inBundle": true, "dependencies": { "minipass": "^3.1.6", @@ -14411,7 +14452,7 @@ "eslint": "^8.1.0", "minimatch": "^3.0.4", "npm-package-arg": "^9.0.0", - "pacote": "13.0.4", + "pacote": "^13.0.4", "tap": "^15.0.9", "tar": "^6.1.0" } @@ -14428,7 +14469,7 @@ "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.0", "npmlog": "^6.0.1", - "pacote": "13.0.4", + "pacote": "^13.0.4", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", @@ -14535,7 +14576,7 @@ "@npmcli/template-oss": "^2.4.2", "nock": "^13.0.7", "npm-package-arg": "^9.0.0", - "pacote": "13.0.4", + "pacote": "^13.0.4", "tap": "^15.0.0" } }, @@ -14801,20 +14842,20 @@ "peer": true }, "make-fetch-happen": { - "version": "10.0.5", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.5.tgz", - "integrity": "sha512-0JQ0daMRDFEv14DelmcFlprdhSDNG7WEgInTjBeWYWZ78W0jfDqygZdPLhcrQ4s/G8skNhBrS4fiF6xA+YlFjQ==", + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.6.tgz", + "integrity": "sha512-4Gfh6lV3TLXmj7qz79hBFuvVqjYSMW6v2+sxtdX4LFQU0rK3V/txRjE0DoZb7X0IF3t9f8NO3CxPSWlvdckhVA==", "requires": { "agentkeepalive": "^4.2.1", - "cacache": "^15.3.0", + "cacache": "^16.0.0", "http-cache-semantics": "^4.1.0", "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.4.1", + "lru-cache": "^7.5.1", "minipass": "^3.1.6", "minipass-collect": "^1.0.2", - "minipass-fetch": "^2.0.2", + "minipass-fetch": "^2.0.3", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", @@ -14823,10 +14864,45 @@ "ssri": "^8.0.1" }, "dependencies": { + "cacache": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "requires": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.1.2", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.11", + "unique-filename": "^1.1.1" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + } + } + }, "lru-cache": { - "version": "7.4.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.4.2.tgz", - "integrity": "sha512-Xs3+hFPDSKQmL05Gs6NhvAADol1u9TmLoNoE03ZjszX6a5iYIO3rPUM4jIjoBUJeTaWEBMozjjmV70gvdRfIdw==" + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" } } }, @@ 
-14948,9 +15024,9 @@ } }, "minipass-fetch": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.0.2.tgz", - "integrity": "sha512-M63u5yWX0yxY1C3DcLVY1xWai0pNM3qa1xCMXFgdejY5F/NTmyzNVHGcBxKerX51lssqxwWWTjpg/ZPuD39gOQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.0.3.tgz", + "integrity": "sha512-VA+eiiUtaIvpQJXISwE3OiMvQwAWrgKb97F0aXlCS1Ahikr8fEQq8m3Hf7Kv9KT3nokuHigJKsDMB6atU04olQ==", "requires": { "encoding": "^0.1.13", "minipass": "^3.1.6", diff --git a/package.json b/package.json index 41df58368a863..aee3b139aadbc 100644 --- a/package.json +++ b/package.json @@ -90,7 +90,7 @@ "libnpmsearch": "^5.0.1", "libnpmteam": "^4.0.1", "libnpmversion": "^3.0.1", - "make-fetch-happen": "^10.0.5", + "make-fetch-happen": "^10.0.6", "minipass": "^3.1.6", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", From d41e5b2c297872b91dd994ab1fffe9fd7489900f Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 14 Mar 2022 13:53:34 -0700 Subject: [PATCH 04/11] deps: cacache@16.0.0 --- .../node_modules/cacache/LICENSE.md | 16 - .../node_modules/cacache/lib/content/path.js | 29 -- .../node_modules/cacache/lib/content/read.js | 259 ----------- .../node_modules/cacache/lib/content/rm.js | 20 - .../node_modules/cacache/lib/content/write.js | 194 --------- .../node_modules/cacache/lib/entry-index.js | 412 ------------------ .../node_modules/cacache/lib/memoization.js | 74 ---- .../node_modules/cacache/lib/util/disposer.js | 31 -- .../cacache/lib/util/fix-owner.js | 148 ------- .../cacache/lib/util/hash-to-segments.js | 7 - .../cacache/lib/util/move-file.js | 69 --- .../node_modules/cacache/lib/util/tmp.js | 35 -- .../node_modules/cacache/lib/verify.js | 291 ------------- .../node_modules/cacache/package.json | 88 ---- node_modules/cacache/get.js | 237 ---------- node_modules/cacache/index.js | 46 -- node_modules/cacache/lib/content/read.js | 47 +- node_modules/cacache/lib/content/rm.js | 5 +- node_modules/cacache/lib/content/write.js | 13 +- node_modules/cacache/lib/entry-index.js | 62 ++- .../node_modules => }/cacache/lib/get.js | 0 .../node_modules => }/cacache/lib/index.js | 0 node_modules/cacache/lib/memoization.js | 9 +- .../node_modules => }/cacache/lib/put.js | 0 .../node_modules => }/cacache/lib/rm.js | 0 node_modules/cacache/lib/util/disposer.js | 3 +- node_modules/cacache/lib/util/fix-owner.js | 18 +- node_modules/cacache/lib/util/move-file.js | 6 +- node_modules/cacache/lib/verify.js | 16 +- node_modules/cacache/ls.js | 6 - node_modules/cacache/package.json | 34 +- node_modules/cacache/put.js | 83 ---- node_modules/cacache/rm.js | 31 -- node_modules/cacache/verify.js | 3 - .../node_modules/cacache/LICENSE.md | 16 - .../node_modules/cacache/lib/content/path.js | 29 -- .../node_modules/cacache/lib/content/read.js | 259 ----------- .../node_modules/cacache/lib/content/rm.js | 20 - .../node_modules/cacache/lib/content/write.js | 194 --------- .../node_modules/cacache/lib/entry-index.js | 412 ------------------ .../node_modules/cacache/lib/get.js | 251 ----------- .../node_modules/cacache/lib/index.js | 45 -- .../node_modules/cacache/lib/memoization.js | 74 ---- .../node_modules/cacache/lib/put.js | 87 ---- .../node_modules/cacache/lib/rm.js | 31 -- .../node_modules/cacache/lib/util/disposer.js | 31 -- .../cacache/lib/util/fix-owner.js | 148 ------- .../cacache/lib/util/hash-to-segments.js | 7 - .../cacache/lib/util/move-file.js | 69 --- .../node_modules/cacache/lib/util/tmp.js | 35 -- 
.../node_modules/cacache/lib/verify.js | 291 ------------- .../cacache/node_modules/lru-cache/LICENSE | 15 - .../cacache/node_modules/lru-cache/index.js | 334 -------------- .../node_modules/lru-cache/package.json | 34 -- .../node_modules/cacache/package.json | 88 ---- .../pacote/node_modules/cacache/LICENSE.md | 16 - .../node_modules/cacache/lib/content/path.js | 29 -- .../node_modules/cacache/lib/content/read.js | 259 ----------- .../node_modules/cacache/lib/content/rm.js | 20 - .../node_modules/cacache/lib/content/write.js | 194 --------- .../node_modules/cacache/lib/entry-index.js | 412 ------------------ .../pacote/node_modules/cacache/lib/get.js | 251 ----------- .../pacote/node_modules/cacache/lib/index.js | 45 -- .../node_modules/cacache/lib/memoization.js | 74 ---- .../pacote/node_modules/cacache/lib/put.js | 87 ---- .../pacote/node_modules/cacache/lib/rm.js | 31 -- .../node_modules/cacache/lib/util/disposer.js | 31 -- .../cacache/lib/util/fix-owner.js | 148 ------- .../cacache/lib/util/hash-to-segments.js | 7 - .../cacache/lib/util/move-file.js | 69 --- .../node_modules/cacache/lib/util/tmp.js | 35 -- .../pacote/node_modules/cacache/lib/verify.js | 291 ------------- .../pacote/node_modules/cacache/package.json | 88 ---- package-lock.json | 227 +--------- package.json | 2 +- workspaces/arborist/package.json | 2 +- 76 files changed, 159 insertions(+), 6921 deletions(-) delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json delete mode 100644 node_modules/cacache/get.js delete mode 100644 node_modules/cacache/index.js rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/cacache/lib/get.js (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/cacache/lib/index.js (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/cacache/lib/put.js (100%) rename node_modules/{@npmcli/metavuln-calculator/node_modules => }/cacache/lib/rm.js (100%) delete mode 100644 node_modules/cacache/ls.js delete mode 100644 node_modules/cacache/put.js delete mode 100644 node_modules/cacache/rm.js delete mode 100644 node_modules/cacache/verify.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md delete mode 
100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/get.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/index.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/put.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json delete mode 100644 node_modules/make-fetch-happen/node_modules/cacache/package.json delete mode 100644 node_modules/pacote/node_modules/cacache/LICENSE.md delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/path.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/read.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/rm.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/write.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/entry-index.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/get.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/index.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/memoization.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/put.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/rm.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/disposer.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/move-file.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/tmp.js delete mode 100644 node_modules/pacote/node_modules/cacache/lib/verify.js delete mode 100644 node_modules/pacote/node_modules/cacache/package.json diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 
(c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 8bffb2af83cab..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,259 +0,0 @@ -'use strict' - -const util = require('util') - -const fs = require('fs') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -const lstat = util.promisify(fs.lstat) -const readFile = util.promisify(fs.readFile) - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -function read (cache, integrity, opts = {}) { - const { size } = opts - return withContentSri(cache, integrity, (cpath, sri) => { - // get size - return lstat(cpath).then(stat => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - return readFile(cpath, null).then((data) => { - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data - }) - }) -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.sync = readSync - -function readSync (cache, integrity, opts = {}) { - const { size } = opts - 
return withContentSriSync(cache, integrity, (cpath, sri) => { - const data = fs.readFileSync(cpath) - if (typeof size === 'number' && size !== data.length) { - throw sizeError(size, data.length) - } - - if (ssri.checkData(data, sri)) { - return data - } - - throw integrityError(sri, cpath) - }) -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - withContentSri(cache, integrity, (cpath, sri) => { - // just lstat to ensure it exists - return lstat(cpath).then((stat) => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } - - readPipeline(cpath, stat.size, sri, stream) - }, er => stream.emit('error', er)) - - return stream -} - -let copyFile -if (fs.copyFile) { - module.exports.copy = copy - module.exports.copy.sync = copySync - copyFile = util.promisify(fs.copyFile) -} - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath, sri) => { - return copyFile(cpath, dest) - }) -} - -function copySync (cache, integrity, dest) { - return withContentSriSync(cache, integrity, (cpath, sri) => { - return fs.copyFileSync(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -function hasContent (cache, integrity) { - if (!integrity) { - return Promise.resolve(false) - } - - return withContentSri(cache, integrity, (cpath, sri) => { - return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) - }).catch((err) => { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - }) -} - -module.exports.hasContent.sync = hasContentSync - -function hasContentSync (cache, integrity) { - if (!integrity) { - return false - } - - return withContentSriSync(cache, integrity, (cpath, sri) => { - try { - const stat = fs.lstatSync(cpath) - return { size: stat.size, sri, stat } - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } - }) -} - -function withContentSri (cache, integrity, fn) { - const tryFn = () => { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
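To make the digest-selection step described in the comment above concrete: an SRI value can carry digests under several algorithms, and ssri's pickAlgorithm() chooses the strongest one. A small standalone sketch of the calls involved; the integrity value here is computed on the spot rather than taken from a real cache:

const ssri = require('ssri')

// build an Integrity object carrying two digests for the same data
const integrity = ssri.fromData('some cached bytes', { algorithms: ['sha256'] })
  .concat(ssri.fromData('some cached bytes', { algorithms: ['sha512'] }))

// pickAlgorithm() prefers the strongest algorithm present (sha512 here),
// and sri[algo] holds every digest recorded for that algorithm
const sri = ssri.parse(integrity)
const algo = sri.pickAlgorithm()
console.log(algo, sri[algo].map((hash) => hash.hexDigest()))
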
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - return Promise - .all(digests.map((meta) => { - return withContentSri(cache, meta, fn) - .catch((err) => { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - }) - })) - .then((results) => { - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - }) - } - } - - return new Promise((resolve, reject) => { - try { - tryFn() - .then(resolve) - .catch(reject) - } catch (err) { - reject(err) - } - }) -} - -function withContentSriSync (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - let lastErr = null - for (const meta of digests) { - try { - return withContentSriSync(cache, meta, fn) - } catch (err) { - lastErr = err - } - } - throw lastErr - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index 50612364e9b48..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const { hasContent } = require('./read') -const rimraf = util.promisify(require('rimraf')) - -module.exports = rm - -function rm (cache, integrity) { - return hasContent(cache, integrity).then((content) => { - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - return rimraf(contentPath(cache, content.sri)).then(() => true) - } else { - return false - } - }) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js deleted file mode 100644 index a71e81ad5e150..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,194 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const fixOwner = require('../util/fix-owner') -const fs = require('fs') -const 
moveFile = require('../util/move-file') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const { disposer } = require('./../util/disposer') -const fsm = require('fs-minipass') - -const writeFile = util.promisify(fs.writeFile) - -module.exports = write - -function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - if (algorithms && algorithms.length > 1) { - throw new Error('opts.algorithms only supports a single algorithm for now') - } - - if (typeof size === 'number' && data.length !== size) { - return Promise.reject(sizeError(size, data.length)) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - return Promise.reject(checksumError(integrity, sri)) - } - - return disposer(makeTmp(cache, opts), makeTmpDisposer, - (tmp) => { - return writeFile(tmp.target, data, { flag: 'wx' }) - .then(() => moveToDestination(tmp, cache, sri, opts)) - }) - .then(() => ({ integrity: sri, size: data.length })) -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. - return Promise.reject(e).catch(cb) - } - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -function handleContent (inputStream, cache, opts) { - return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { - return pipeToTmp(inputStream, cache, tmp.target, opts) - .then((res) => { - return moveToDestination( - tmp, - cache, - res.integrity, - opts - ).then(() => res) - }) - }) -} - -function pipeToTmp (inputStream, cache, tmpTarget, opts) { - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - // NB: this can throw if the hashStream has a problem with - // it, and the data is fully written. but pipeToTmp is only - // called in promisory contexts where that is handled. 
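For orientation, this streaming write path is what backs cacache's public put.stream() API: data is hashed as it flows through and the computed integrity is emitted once the content lands. A minimal usage sketch; the cache path, key, and file name are made-up examples:

const cacache = require('cacache')
const fs = require('fs')

// pipe arbitrary data in; cacache hashes it on the way through
const dest = cacache.put.stream('/tmp/example-cache', 'example-key')
dest.on('integrity', (i) => console.log('stored as', i.toString()))
fs.createReadStream('example.tgz').pipe(dest)
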
- const pipeline = new Pipeline( - inputStream, - hashStream, - outStream - ) - - return pipeline.promise() - .then(() => ({ integrity, size })) - .catch(er => rimraf(tmpTarget).then(() => { - throw er - })) -} - -function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ - target: tmpTarget, - moved: false, - })) -} - -function makeTmpDisposer (tmp) { - if (tmp.moved) { - return Promise.resolve() - } - - return rimraf(tmp.target) -} - -function moveToDestination (tmp, cache, sri, opts) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - - return fixOwner - .mkdirfix(cache, destDir) - .then(() => { - return moveFile(tmp.target, destination) - }) - .then(() => { - tmp.moved = true - return fixOwner.chownr(cache, destination) - }) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 426778b850963..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,412 +0,0 @@ -'use strict' - -const util = require('util') -const crypto = require('crypto') -const fs = require('fs') -const Minipass = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const { disposer } = require('./util/disposer') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const moveFile = require('@npmcli/move-file') -const _rimraf = require('rimraf') -const rimraf = util.promisify(_rimraf) -rimraf.sync = _rimraf.sync - -const appendFile = util.promisify(fs.appendFile) -const readFile = util.promisify(fs.readFile) -const readdir = util.promisify(fs.readdir) -const writeFile = util.promisify(fs.writeFile) - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. 
- // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(target)) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rimraf(tmp.target) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await fixOwner.mkdirfix(cache, path.dirname(bucket)) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - try { - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - } - - // write the file atomically - await disposer(setup(), teardown, write) - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -function insert (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - return fixOwner - .mkdirfix(cache, path.dirname(bucket)) - .then(() => { - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - }) - .then(() => fixOwner.chownr(cache, bucket)) - .catch((err) => { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. - // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. 
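The "cleverness" note above boils down to a self-checking line format: each bucket line is the sha1 of the serialized entry, a tab, then the JSON itself, so a torn or interleaved append is detected and skipped on read rather than crashing. A sketch with a made-up entry:

const crypto = require('crypto')

const hashEntry = (str) =>
  crypto.createHash('sha1').update(str).digest('hex')

// a made-up entry in the shape insert() serializes
const entry = { key: 'example-key', integrity: 'sha512-deadbeef', time: Date.now(), size: 1024 }
const stringified = JSON.stringify(entry)
const line = `${hashEntry(stringified)}\t${stringified}`

// a reader accepts the line only if the hash still matches the JSON
const [hash, json] = line.split('\t')
console.log(hashEntry(json) === hash) // true
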
- }) - .then(() => { - return formatEntry(cache, entry) - }) -} - -module.exports.insert.sync = insertSync - -function insertSync (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) - const stringified = JSON.stringify(entry) - fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - try { - fixOwner.chownr.sync(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -function find (cache, key) { - const bucket = bucketPath(cache, key) - return bucketEntries(bucket) - .then((entries) => { - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - }) -} - -module.exports.find.sync = findSync - -function findSync (cache, key) { - const bucket = bucketPath(cache, key) - try { - return bucketEntriesSync(bucket).reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf(bucket) -} - -module.exports.delete.sync = delSync - -function delSync (cache, key, opts = {}) { - if (!opts.removeFully) { - return insertSync(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf.sync(bucket) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - readdirOrEmpty(indexDir).then(buckets => Promise.all( - buckets.map(bucket => { - const bucketPath = path.join(indexDir, bucket) - return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( - subbuckets.map(subbucket => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - return readdirOrEmpty(subbucketPath).then(entries => Promise.all( - entries.map(entry => { - const entryPath = path.join(subbucketPath, entry) - return bucketEntries(entryPath).then(entries => - // using a Map here prevents duplicate keys from - // showing up twice, I guess? 
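The triple-nested readdir in lsStream above mirrors how index buckets are laid out on disk: the key is sha256-hashed and the hex digest is split into two 2-character directories plus a filename. A sketch of that mapping; the cache path is a made-up example, and the literal 'index-v5' stands in for the version read from package.json:

const crypto = require('crypto')
const path = require('path')

const hashKey = (key) =>
  crypto.createHash('sha256').update(key).digest('hex')
const hashToSegments = (hash) =>
  [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]

// e.g. /tmp/example-cache/index-v5/aa/bb/ccdd... for key 'example-key'
console.log(path.join('/tmp/example-cache', 'index-v5',
  ...hashToSegments(hashKey('example-key'))))
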
- entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - ).then(reduced => { - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - }).catch(err => { - if (err.code === 'ENOENT') { - return undefined - } - throw err - }) - }) - )) - }) - )) - }) - )) - .then( - () => stream.end(), - err => stream.emit('error', err) - ) - - return stream -} - -module.exports.ls = ls - -function ls (cache) { - return lsStream(cache).collect().then(entries => - entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) - ) -} - -module.exports.bucketEntries = bucketEntries - -function bucketEntries (bucket, filter) { - return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) -} - -module.exports.bucketEntries.sync = bucketEntriesSync - -function bucketEntriesSync (bucket, filter) { - const data = fs.readFileSync(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data, filter) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (e) { - // Entry is corrupted! - return - } - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js deleted file mode 100644 index e1b13dd5fd528..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,74 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') - -const MAX_SIZE = 50 * 1024 * 1024 // 50MB -const MAX_AGE = 3 * 60 * 1000 - -const MEMOIZED = new LRU({ - max: MAX_SIZE, - maxAge: MAX_AGE, - length: (entry, key) => key.startsWith('key:') ? 
entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.reset() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js deleted file mode 100644 index 52d7d3edda7d5..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/disposer.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -module.exports.disposer = disposer - -function disposer (creatorFn, disposerFn, fn) { - const runDisposer = (resource, result, shouldThrow = false) => { - return disposerFn(resource) - .then( - // disposer resolved, do something with original fn's promise - () => { - if (shouldThrow) { - throw result - } - - return result - }, - // Disposer fn failed, crash process - (err) => { - throw err - // Or process.exit? - }) - } - - return creatorFn - .then((resource) => { - // fn(resource) can throw, so wrap in a promise here - return Promise.resolve().then(() => fn(resource)) - .then((result) => runDisposer(resource, result)) - .catch((err) => runDisposer(resource, err, true)) - }) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js deleted file mode 100644 index bc14def4e405c..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/fix-owner.js +++ /dev/null @@ -1,148 +0,0 @@ -'use strict' - -const util = require('util') - -const chownr = util.promisify(require('chownr')) -const mkdirp = require('mkdirp') -const inflight = require('promise-inflight') -const inferOwner = require('infer-owner') - -// Memoize getuid()/getgid() calls. 
-// patch process.setuid/setgid to invalidate cached value on change -const self = { uid: null, gid: null } -const getSelf = () => { - if (typeof self.uid !== 'number') { - self.uid = process.getuid() - const setuid = process.setuid - process.setuid = (uid) => { - self.uid = null - process.setuid = setuid - return process.setuid(uid) - } - } - if (typeof self.gid !== 'number') { - self.gid = process.getgid() - const setgid = process.setgid - process.setgid = (gid) => { - self.gid = null - process.setgid = setgid - return process.setgid(gid) - } - } -} - -module.exports.chownr = fixOwner - -function fixOwner (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return Promise.resolve() - } - - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return Promise.resolve() - } - - return Promise.resolve(inferOwner(cache)).then((owner) => { - const { uid, gid } = owner - - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } - - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) - }) -} - -module.exports.chownr.sync = fixOwnerSync - -function fixOwnerSync (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - const { uid, gid } = inferOwner.sync(cache) - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - if (self.uid === uid && self.gid === gid) { - // No need to override if it's already what we used. - return - } - try { - chownr.sync( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ) - } catch (err) { - // only catch ENOENT, any other error is a problem. - if (err.code === 'ENOENT') { - return null - } - - throw err - } -} - -module.exports.mkdirfix = mkdirfix - -function mkdirfix (cache, p, cb) { - // we have to infer the owner _before_ making the directory, even though - // we aren't going to use the results, since the cache itself might not - // exist yet. If we mkdirp it, then our current uid/gid will be assumed - // to be correct if it creates the cache folder in the process. 
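Stripped of the uid caching and error handling, the ownership dance in this file is: infer the cache's owner first, create the directory, then chown whatever was created. A simplified sketch under those assumptions (POSIX-only, and only meaningful when running as root):

const util = require('util')
const chownr = util.promisify(require('chownr'))
const inferOwner = require('infer-owner')
const mkdirp = require('mkdirp')

async function mkdirfixSketch (cache, p) {
  // infer first: if mkdirp ends up creating the cache root itself,
  // the current (root) uid/gid would otherwise be inferred as correct
  const { uid, gid } = await inferOwner(cache)
  const made = await mkdirp(p)
  if (made && process.getuid && process.getuid() === 0) {
    await chownr(made, uid, gid)
  }
  return made
}
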
- return Promise.resolve(inferOwner(cache)).then(() => { - return mkdirp(p) - .then((made) => { - if (made) { - return fixOwner(cache, made).then(() => made) - } - }) - .catch((err) => { - if (err.code === 'EEXIST') { - return fixOwner(cache, p).then(() => null) - } - - throw err - }) - }) -} - -module.exports.mkdirfix.sync = mkdirfixSync - -function mkdirfixSync (cache, p) { - try { - inferOwner.sync(cache) - const made = mkdirp.sync(p) - if (made) { - fixOwnerSync(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - fixOwnerSync(cache, p) - return null - } else { - throw err - } - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js deleted file mode 100644 index 3739cea3df281..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/move-file.js +++ /dev/null @@ -1,69 +0,0 @@ -'use strict' - -const fs = require('fs') -const util = require('util') -const chmod = util.promisify(fs.chmod) -const unlink = util.promisify(fs.unlink) -const stat = util.promisify(fs.stat) -const move = require('@npmcli/move-file') -const pinflight = require('promise-inflight') - -module.exports = moveFile - -function moveFile (src, dest) { - const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || - process.platform === 'win32' - - // This isn't quite an fs.rename -- the assumption is that - // if `dest` already exists, and we get certain errors while - // trying to move it, we should just not bother. - // - // In the case of cache corruption, users will receive an - // EINTEGRITY error elsewhere, and can remove the offending - // content their own way. - // - // Note that, as the name suggests, this strictly only supports file moves. - return new Promise((resolve, reject) => { - fs.link(src, dest, (err) => { - if (err) { - if (isWindows && err.code === 'EPERM') { - // XXX This is a really weird way to handle this situation, as it - // results in the src file being deleted even though the dest - // might not exist. Since we pretty much always write files to - // deterministic locations based on content hash, this is likely - // ok (or at worst, just ends in a future cache miss). But it would - // be worth investigating at some time in the future if this is - // really what we want to do here. - return resolve() - } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { - // file already exists, so whatever - return resolve() - } else { - return reject(err) - } - } else { - return resolve() - } - }) - }) - .then(() => { - // content should never change for any reason, so make it read-only - return Promise.all([ - unlink(src), - !isWindows && chmod(dest, '0444'), - ]) - }) - .catch(() => { - return pinflight('cacache-move-file:' + dest, () => { - return stat(dest).catch((err) => { - if (err.code !== 'ENOENT') { - // Something else is wrong here. 
Bail bail bail - throw err - } - // file doesn't already exist! let's try a rename -> copy fallback - // only delete if it successfully copies - return move(src, dest) - }) - }) - }) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0a5a50eba3061..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') - -const fixOwner = require('./fix-owner') -const path = require('path') - -module.exports.mkdir = mktmpdir - -function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - .then(() => { - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) - }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) -} - -module.exports.fix = fixtmpdir - -function fixtmpdir (cache) { - return fixOwner(cache, path.join(cache, 'tmp')) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js deleted file mode 100644 index 300cd9f9de1c4..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,291 +0,0 @@ -'use strict' - -const util = require('util') - -const pMap = require('p-map') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const fs = require('fs') -const fsm = require('fs-minipass') -const glob = util.promisify(require('glob')) -const index = require('./entry-index') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const stat = util.promisify(fs.stat) -const truncate = util.promisify(fs.truncate) -const writeFile = util.promisify(fs.writeFile) -const readFile = util.promisify(fs.readFile) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - return steps - .reduce((promise, step, i) => { - const label = step.name - const start = new Date() - return promise.then((stats) => { - return step(cache, opts).then((s) => { - s && - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - - stats.runTime[label] = end - start - return Promise.resolve(stats) - }) - }) - }, Promise.resolve({})) - .then((stats) => { - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats - }) -} - -function markStartTime (cache, opts) { - return Promise.resolve({ startTime: new Date() }) -} - -function markEndTime (cache, opts) { - 
-  return Promise.resolve({ endTime: new Date() })
-}
-
-function fixPerms (cache, opts) {
-  opts.log.silly('verify', 'fixing cache permissions')
-  return fixOwner
-    .mkdirfix(cache, cache)
-    .then(() => {
-      // TODO - fix file permissions too
-      return fixOwner.chownr(cache, cache)
-    })
-    .then(() => null)
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rimraf it.
-//
-function garbageCollect (cache, opts) {
-  opts.log.silly('verify', 'garbage collecting content')
-  const indexStream = index.lsStream(cache)
-  const liveContent = new Set()
-  indexStream.on('data', (entry) => {
-    if (opts.filter && !opts.filter(entry)) {
-      return
-    }
-
-    liveContent.add(entry.integrity.toString())
-  })
-  return new Promise((resolve, reject) => {
-    indexStream.on('end', resolve).on('error', reject)
-  }).then(() => {
-    const contentDir = contentPath.contentDir(cache)
-    return glob(path.join(contentDir, '**'), {
-      follow: false,
-      nodir: true,
-      nosort: true,
-    }).then((files) => {
-      return Promise.resolve({
-        verifiedContent: 0,
-        reclaimedCount: 0,
-        reclaimedSize: 0,
-        badContentCount: 0,
-        keptSize: 0,
-      }).then((stats) =>
-        pMap(
-          files,
-          (f) => {
-            const split = f.split(/[/\\]/)
-            const digest = split.slice(split.length - 3).join('')
-            const algo = split[split.length - 4]
-            const integrity = ssri.fromHex(digest, algo)
-            if (liveContent.has(integrity.toString())) {
-              return verifyContent(f, integrity).then((info) => {
-                if (!info.valid) {
-                  stats.reclaimedCount++
-                  stats.badContentCount++
-                  stats.reclaimedSize += info.size
-                } else {
-                  stats.verifiedContent++
-                  stats.keptSize += info.size
-                }
-                return stats
-              })
-            } else {
-              // No entries refer to this content. We can delete.
-              stats.reclaimedCount++
-              return stat(f).then((s) => {
-                return rimraf(f).then(() => {
-                  stats.reclaimedSize += s.size
-                  return stats
-                })
-              })
-            }
-          },
-          { concurrency: opts.concurrency }
-        ).then(() => stats)
-      )
-    })
-  })
-}
-
-function verifyContent (filepath, sri) {
-  return stat(filepath)
-    .then((s) => {
-      const contentInfo = {
-        size: s.size,
-        valid: true,
-      }
-      return ssri
-        .checkStream(new fsm.ReadStream(filepath), sri)
-        .catch((err) => {
-          if (err.code !== 'EINTEGRITY') {
-            throw err
-          }
-
-          return rimraf(filepath).then(() => {
-            contentInfo.valid = false
-          })
-        })
-        .then(() => contentInfo)
-    })
-    .catch((err) => {
-      if (err.code === 'ENOENT') {
-        return { size: 0, valid: false }
-      }
-
-      throw err
-    })
-}
-
-function rebuildIndex (cache, opts) {
-  opts.log.silly('verify', 'rebuilding index')
-  return index.ls(cache).then((entries) => {
-    const stats = {
-      missingContent: 0,
-      rejectedEntries: 0,
-      totalEntries: 0,
-    }
-    const buckets = {}
-    for (const k in entries) {
-      /* istanbul ignore else */
-      if (hasOwnProperty(entries, k)) {
-        const hashed = index.hashKey(k)
-        const entry = entries[k]
-        const excluded = opts.filter && !opts.filter(entry)
-        excluded && stats.rejectedEntries++
-        if (buckets[hashed] && !excluded) {
-          buckets[hashed].push(entry)
-        } else if (buckets[hashed] && excluded) {
-          // skip
-        } else if (excluded) {
-          buckets[hashed] = []
-          buckets[hashed]._path = index.bucketPath(cache, k)
-        } else {
-          buckets[hashed] = [entry]
-          buckets[hashed]._path = index.bucketPath(cache, k)
-        }
-      }
-    }
-    return pMap(
-      Object.keys(buckets),
-      (key) => {
-        return rebuildBucket(cache, buckets[key], stats, opts)
-      },
-      { concurrency: opts.concurrency }
-    ).then(() => stats)
-  })
-}
-
-function rebuildBucket (cache, bucket, stats, opts) {
-  return truncate(bucket._path).then(() => {
-    // This needs to be serialized because cacache explicitly
-    // lets very racy bucket conflicts clobber each other.
-    return bucket.reduce((promise, entry) => {
-      return promise.then(() => {
-        const content = contentPath(cache, entry.integrity)
-        return stat(content)
-          .then(() => {
-            return index
-              .insert(cache, entry.key, entry.integrity, {
-                metadata: entry.metadata,
-                size: entry.size,
-              })
-              .then(() => {
-                stats.totalEntries++
-              })
-          })
-          .catch((err) => {
-            if (err.code === 'ENOENT') {
-              stats.rejectedEntries++
-              stats.missingContent++
-              return
-            }
-            throw err
-          })
-      })
-    }, Promise.resolve())
-  })
-}
-
-function cleanTmp (cache, opts) {
-  opts.log.silly('verify', 'cleaning tmp directory')
-  return rimraf(path.join(cache, 'tmp'))
-}
-
-function writeVerifile (cache, opts) {
-  const verifile = path.join(cache, '_lastverified')
-  opts.log.silly('verify', 'writing verifile to ' + verifile)
-  try {
-    return writeFile(verifile, '' + +new Date())
-  } finally {
-    fixOwner.chownr.sync(cache, verifile)
-  }
-}
-
-module.exports.lastRun = lastRun
-
-function lastRun (cache) {
-  return readFile(path.join(cache, '_lastverified'), 'utf8').then(
-    (data) => new Date(+data)
-  )
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
deleted file mode 100644
index b9efa92d9f3e0..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
+++ /dev/null
@@ -1,88 +0,0 @@
-{
-  "name": "cacache",
-  "version": "16.0.0",
-  "cache-version": {
-    "content": "2",
-    "index": "5"
-  },
-  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
-  "main": "lib/index.js",
-  "files": [
-    "bin",
-    "lib"
-  ],
-  "scripts": {
-    "benchmarks": "node test/benchmarks",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "test": "tap",
-    "snap": "tap",
-    "coverage": "tap",
-    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
-    "lint": "eslint '**/*.js'",
-    "npmclilint": "npmcli-lint",
-    "lintfix": "npm run lint -- --fix",
-    "postsnap": "npm run lintfix --",
-    "postlint": "npm-template-check",
-    "template-copy": "npm-template-copy --force",
-    "posttest": "npm run lint"
-  },
-  "repository": "https://github.com/npm/cacache",
-  "keywords": [
-    "cache",
-    "caching",
-    "content-addressable",
-    "sri",
-    "sri hash",
-    "subresource integrity",
-    "cache",
-    "storage",
-    "store",
-    "file store",
-    "filesystem",
-    "disk cache",
-    "disk storage"
-  ],
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/fs": "^1.0.0",
-    "@npmcli/move-file": "^1.1.2",
-    "chownr": "^2.0.0",
-    "fs-minipass": "^2.1.0",
-    "glob": "^7.1.4",
-    "infer-owner": "^1.0.4",
-    "lru-cache": "^6.0.0",
-    "minipass": "^3.1.1",
-    "minipass-collect": "^1.0.2",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "mkdirp": "^1.0.4",
-    "p-map": "^4.0.0",
-    "promise-inflight": "^1.0.1",
-    "rimraf": "^3.0.2",
-    "ssri": "^8.0.1",
-    "tar": "^6.1.11",
-    "unique-filename": "^1.1.1"
-  },
-  "devDependencies": {
-    "@npmcli/template-oss": "^2.9.2",
-    "benchmark": "^2.1.4",
-    "chalk": "^4.0.0",
-    "require-inject": "^1.4.4",
-    "tacks": "^1.3.0",
-    "tap": "^15.0.9"
-  },
-  "tap": {
-    "100": true,
-    "test-regex": "test/[^/]*.js"
-  },
-  "engines": {
-    "node": "^12.13.0 || ^14.15.0 || >=16"
-  },
-  "templateOSS": {
-    "windowsCI": false,
-    "version": "2.9.2"
-  },
-  "author": "GitHub Inc."
-} diff --git a/node_modules/cacache/get.js b/node_modules/cacache/get.js deleted file mode 100644 index 4e905e7cf861c..0000000000000 --- a/node_modules/cacache/get.js +++ /dev/null @@ -1,237 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const fs = require('fs') -const util = require('util') - -const index = require('./lib/entry-index') -const memo = require('./lib/memoization') -const read = require('./lib/content/read') - -const writeFile = util.promisify(fs.writeFile) - -function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve({ - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - }) - } - - return index.find(cache, key, opts).then((entry) => { - if (!entry) - throw new index.NotFoundError(cache, key) - - return read(cache, entry.integrity, { integrity, size }).then((data) => { - if (memoize) - memo.put(cache, entry, data, opts) - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) -} -module.exports = getData - -function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) - return Promise.resolve(memoized) - - return read(cache, key, { integrity, size }).then((res) => { - if (memoize) - memo.put.byDigest(cache, key, res, opts) - return res - }) -} -module.exports.byDigest = getDataByDigest - -function getDataSync (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - const entry = index.find.sync(cache, key, opts) - if (!entry) - throw new index.NotFoundError(cache, key) - const data = read.sync(cache, entry.integrity, { - integrity: integrity, - size: size, - }) - const res = { - metadata: entry.metadata, - data: data, - size: entry.size, - integrity: entry.integrity, - } - if (memoize) - memo.put(cache, entry, res.data, opts) - - return res -} - -module.exports.sync = getDataSync - -function getDataByDigestSync (cache, digest, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, digest, opts) - - if (memoized && memoize !== false) - return memoized - - const res = read.sync(cache, digest, { - integrity: integrity, - size: size, - }) - if (memoize) - memo.put.byDigest(cache, digest, res, opts) - - return res -} -module.exports.sync.byDigest = getDataByDigestSync - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) - return getMemoizedStream(memoized) - - const stream = new Pipeline() - index - .find(cache, key) - .then((entry) => { - if (!entry) - throw new 
index.NotFoundError(cache, key) - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - }) - .catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) - return stream - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) - return Promise.resolve(memoized.entry) - else - return index.find(cache, key) -} -module.exports.info = info - -function copy (cache, key, dest, opts = {}) { - if (read.copy) { - return index.find(cache, key, opts).then((entry) => { - if (!entry) - throw new index.NotFoundError(cache, key) - return read.copy(cache, entry.integrity, dest, opts) - .then(() => { - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) - } - - return getData(cache, key, opts).then((res) => { - return writeFile(dest, res.data).then(() => { - return { - metadata: res.metadata, - size: res.size, - integrity: res.integrity, - } - }) - }) -} -module.exports.copy = copy - -function copyByDigest (cache, key, dest, opts = {}) { - if (read.copy) - return read.copy(cache, key, dest, opts).then(() => key) - - return getDataByDigest(cache, key, opts).then((res) => { - return writeFile(dest, res).then(() => key) - }) -} -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/cacache/index.js b/node_modules/cacache/index.js deleted file mode 100644 index c8c52b0417dea..0000000000000 --- a/node_modules/cacache/index.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict' - -const ls = require('./ls.js') -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./lib/memoization.js') -const tmp = require('./lib/util/tmp.js') -const index = require('./lib/entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = ls -module.exports.ls.stream = ls.stream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.sync = get.sync -module.exports.get.sync.byDigest = get.sync.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy 
= get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent -module.exports.get.hasContent.sync = get.hasContent.sync - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/cacache/lib/content/read.js b/node_modules/cacache/lib/content/read.js index 034e8eee05b10..8bffb2af83cab 100644 --- a/node_modules/cacache/lib/content/read.js +++ b/node_modules/cacache/lib/content/read.js @@ -20,15 +20,18 @@ function read (cache, integrity, opts = {}) { // get size return lstat(cpath).then(stat => ({ stat, cpath, sri })) }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && stat.size !== size) + if (typeof size === 'number' && stat.size !== size) { throw sizeError(size, stat.size) + } - if (stat.size > MAX_SINGLE_READ_SIZE) + if (stat.size > MAX_SINGLE_READ_SIZE) { return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } return readFile(cpath, null).then((data) => { - if (!ssri.checkData(data, sri)) + if (!ssri.checkData(data, sri)) { throw integrityError(sri, cpath) + } return data }) @@ -55,11 +58,13 @@ function readSync (cache, integrity, opts = {}) { const { size } = opts return withContentSriSync(cache, integrity, (cpath, sri) => { const data = fs.readFileSync(cpath) - if (typeof size === 'number' && size !== data.length) + if (typeof size === 'number' && size !== data.length) { throw sizeError(size, data.length) + } - if (ssri.checkData(data, sri)) + if (ssri.checkData(data, sri)) { return data + } throw integrityError(sri, cpath) }) @@ -75,8 +80,9 @@ function readStream (cache, integrity, opts = {}) { // just lstat to ensure it exists return lstat(cpath).then((stat) => ({ stat, cpath, sri })) }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && size !== stat.size) + if (typeof size === 'number' && size !== stat.size) { return stream.emit('error', sizeError(size, stat.size)) + } readPipeline(cpath, stat.size, sri, stream) }, er => stream.emit('error', er)) @@ -106,21 +112,24 @@ function copySync (cache, integrity, dest) { module.exports.hasContent = hasContent function hasContent (cache, integrity) { - if (!integrity) + if (!integrity) { return Promise.resolve(false) + } return withContentSri(cache, integrity, (cpath, sri) => { return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) }).catch((err) => { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return false + } if (err.code === 'EPERM') { /* istanbul ignore else */ - if (process.platform !== 'win32') + if (process.platform !== 'win32') { throw err - else + } else { return false + } } }) } @@ -128,23 +137,26 @@ function hasContent (cache, integrity) { module.exports.hasContent.sync = hasContentSync function hasContentSync (cache, integrity) { - if (!integrity) + if (!integrity) { return false + } return withContentSriSync(cache, integrity, (cpath, sri) => { try { const stat = fs.lstatSync(cpath) return { size: stat.size, sri, stat } } catch (err) { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return false + } if (err.code === 'EPERM') { /* istanbul 
ignore else */ - if (process.platform !== 'win32') + if (process.platform !== 'win32') { throw err - else + } else { return false + } } } }) @@ -180,13 +192,15 @@ function withContentSri (cache, integrity, fn) { .then((results) => { // Return the first non error if it is found const result = results.find((r) => !(r instanceof Error)) - if (result) + if (result) { return result + } // Throw the No matching content found error const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) + if (enoentError) { throw enoentError + } // Throw generic error throw results.find((r) => r instanceof Error) @@ -228,6 +242,7 @@ function withContentSriSync (cache, integrity, fn) { } function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) err.expected = expected err.found = found diff --git a/node_modules/cacache/lib/content/rm.js b/node_modules/cacache/lib/content/rm.js index 6a3d1a3d02340..50612364e9b48 100644 --- a/node_modules/cacache/lib/content/rm.js +++ b/node_modules/cacache/lib/content/rm.js @@ -11,9 +11,10 @@ module.exports = rm function rm (cache, integrity) { return hasContent(cache, integrity).then((content) => { // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) + if (content && content.sri) { return rimraf(contentPath(cache, content.sri)).then(() => true) - else + } else { return false + } }) } diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js index dde1bd1dd5dae..a71e81ad5e150 100644 --- a/node_modules/cacache/lib/content/write.js +++ b/node_modules/cacache/lib/content/write.js @@ -22,15 +22,18 @@ module.exports = write function write (cache, data, opts = {}) { const { algorithms, size, integrity } = opts - if (algorithms && algorithms.length > 1) + if (algorithms && algorithms.length > 1) { throw new Error('opts.algorithms only supports a single algorithm for now') + } - if (typeof size === 'number' && data.length !== size) + if (typeof size === 'number' && data.length !== size) { return Promise.reject(sizeError(size, data.length)) + } const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) + if (integrity && !ssri.checkData(data, integrity, opts)) { return Promise.reject(checksumError(integrity, sri)) + } return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { @@ -149,8 +152,9 @@ function makeTmp (cache, opts) { } function makeTmpDisposer (tmp) { - if (tmp.moved) + if (tmp.moved) { return Promise.resolve() + } return rimraf(tmp.target) } @@ -171,6 +175,7 @@ function moveToDestination (tmp, cache, sri, opts) { } function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) err.expected = expected err.found = found diff --git a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js index 71aac5ed75b14..426778b850963 100644 --- a/node_modules/cacache/lib/entry-index.js +++ b/node_modules/cacache/lib/entry-index.js @@ -49,8 +49,9 @@ async function compact (cache, key, matchFn, opts = {}) { // if the integrity is null and no validateEntry is provided, we break // as we consider the null integrity to be a deletion of everything // that came before it. 
- if (entry.integrity === null && !opts.validateEntry) + if (entry.integrity === null && !opts.validateEntry) { break + } // if this entry is valid, and it is either the first entry or // the newEntries array doesn't already include an entry that @@ -58,8 +59,9 @@ async function compact (cache, key, matchFn, opts = {}) { // it to the beginning of our list if ((!opts.validateEntry || opts.validateEntry(entry) === true) && (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { newEntries.unshift(entry) + } } const newIndex = '\n' + newEntries.map((entry) => { @@ -78,8 +80,9 @@ async function compact (cache, key, matchFn, opts = {}) { } const teardown = async (tmp) => { - if (!tmp.moved) + if (!tmp.moved) { return rimraf(tmp.target) + } } const write = async (tmp) => { @@ -92,8 +95,9 @@ async function compact (cache, key, matchFn, opts = {}) { try { await fixOwner.chownr(cache, bucket) } catch (err) { - if (err.code !== 'ENOENT') + if (err.code !== 'ENOENT') { throw err + } } } @@ -136,8 +140,9 @@ function insert (cache, key, integrity, opts = {}) { }) .then(() => fixOwner.chownr(cache, bucket)) .catch((err) => { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return undefined + } throw err // There's a class of race conditions that happen when things get deleted @@ -169,8 +174,9 @@ function insertSync (cache, key, integrity, opts = {}) { try { fixOwner.chownr.sync(cache, bucket) } catch (err) { - if (err.code !== 'ENOENT') + if (err.code !== 'ENOENT') { throw err + } } return formatEntry(cache, entry) } @@ -182,17 +188,19 @@ function find (cache, key) { return bucketEntries(bucket) .then((entries) => { return entries.reduce((latest, next) => { - if (next && next.key === key) + if (next && next.key === key) { return formatEntry(cache, next) - else + } else { return latest + } }, null) }) .catch((err) => { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return null - else + } else { throw err + } }) } @@ -202,24 +210,27 @@ function findSync (cache, key) { const bucket = bucketPath(cache, key) try { return bucketEntriesSync(bucket).reduce((latest, next) => { - if (next && next.key === key) + if (next && next.key === key) { return formatEntry(cache, next) - else + } else { return latest + } }, null) } catch (err) { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return null - else + } else { throw err + } } } module.exports.delete = del function del (cache, key, opts = {}) { - if (!opts.removeFully) + if (!opts.removeFully) { return insert(cache, key, null, opts) + } const bucket = bucketPath(cache, key) return rimraf(bucket) @@ -228,8 +239,9 @@ function del (cache, key, opts = {}) { module.exports.delete.sync = delSync function delSync (cache, key, opts = {}) { - if (!opts.removeFully) + if (!opts.removeFully) { return insertSync(cache, key, null, opts) + } const bucket = bucketPath(cache, key) return rimraf.sync(bucket) @@ -263,12 +275,14 @@ function lsStream (cache) { // reduced is a map of key => entry for (const entry of reduced.values()) { const formatted = formatEntry(cache, entry) - if (formatted) + if (formatted) { stream.write(formatted) + } } }).catch(err => { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return undefined + } throw err }) }) @@ -312,8 +326,9 @@ function bucketEntriesSync (bucket, filter) { function _bucketEntries (data, filter) { const entries = [] data.split('\n').forEach((entry) => { - if (!entry) + if (!entry) { return + 
} const pieces = entry.split('\t') if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { @@ -328,8 +343,9 @@ function _bucketEntries (data, filter) { // Entry is corrupted! return } - if (obj) + if (obj) { entries.push(obj) + } }) return entries } @@ -371,8 +387,9 @@ function hash (str, digest) { function formatEntry (cache, entry, keepAll) { // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) + if (!entry.integrity && !keepAll) { return null + } return { key: entry.key, @@ -386,8 +403,9 @@ function formatEntry (cache, entry, keepAll) { function readdirOrEmpty (dir) { return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { return [] + } throw err }) diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js b/node_modules/cacache/lib/get.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js rename to node_modules/cacache/lib/get.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js b/node_modules/cacache/lib/index.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js rename to node_modules/cacache/lib/index.js diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js index d5465f39fc581..e1b13dd5fd528 100644 --- a/node_modules/cacache/lib/memoization.js +++ b/node_modules/cacache/lib/memoization.js @@ -62,12 +62,13 @@ class ObjProxy { } function pickMem (opts) { - if (!opts || !opts.memoize) + if (!opts || !opts.memoize) { return MEMOIZED - else if (opts.memoize.get && opts.memoize.set) + } else if (opts.memoize.get && opts.memoize.set) { return opts.memoize - else if (typeof opts.memoize === 'object') + } else if (typeof opts.memoize === 'object') { return new ObjProxy(opts.memoize) - else + } else { return MEMOIZED + } } diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js b/node_modules/cacache/lib/put.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js rename to node_modules/cacache/lib/put.js diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js b/node_modules/cacache/lib/rm.js similarity index 100% rename from node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js rename to node_modules/cacache/lib/rm.js diff --git a/node_modules/cacache/lib/util/disposer.js b/node_modules/cacache/lib/util/disposer.js index aa8aed54da551..52d7d3edda7d5 100644 --- a/node_modules/cacache/lib/util/disposer.js +++ b/node_modules/cacache/lib/util/disposer.js @@ -8,8 +8,9 @@ function disposer (creatorFn, disposerFn, fn) { .then( // disposer resolved, do something with original fn's promise () => { - if (shouldThrow) + if (shouldThrow) { throw result + } return result }, diff --git a/node_modules/cacache/lib/util/fix-owner.js b/node_modules/cacache/lib/util/fix-owner.js index 90ffece524f54..bc14def4e405c 100644 --- a/node_modules/cacache/lib/util/fix-owner.js +++ b/node_modules/cacache/lib/util/fix-owner.js @@ -49,8 +49,9 @@ function fixOwner (cache, filepath) { const { uid, gid } = owner // No need to override if it's already what we used. 
- if (self.uid === uid && self.gid === gid) + if (self.uid === uid && self.gid === gid) { return + } return inflight('fixOwner: fixing ownership on ' + filepath, () => chownr( @@ -58,8 +59,9 @@ function fixOwner (cache, filepath) { typeof uid === 'number' ? uid : self.uid, typeof gid === 'number' ? gid : self.gid ).catch((err) => { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return null + } throw err }) @@ -93,8 +95,9 @@ function fixOwnerSync (cache, filepath) { ) } catch (err) { // only catch ENOENT, any other error is a problem. - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return null + } throw err } @@ -110,12 +113,14 @@ function mkdirfix (cache, p, cb) { return Promise.resolve(inferOwner(cache)).then(() => { return mkdirp(p) .then((made) => { - if (made) + if (made) { return fixOwner(cache, made).then(() => made) + } }) .catch((err) => { - if (err.code === 'EEXIST') + if (err.code === 'EEXIST') { return fixOwner(cache, p).then(() => null) + } throw err }) @@ -136,7 +141,8 @@ function mkdirfixSync (cache, p) { if (err.code === 'EEXIST') { fixOwnerSync(cache, p) return null - } else + } else { throw err + } } } diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js index c3f9e35eb99c7..3739cea3df281 100644 --- a/node_modules/cacache/lib/util/move-file.js +++ b/node_modules/cacache/lib/util/move-file.js @@ -38,10 +38,12 @@ function moveFile (src, dest) { } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { // file already exists, so whatever return resolve() - } else + } else { return reject(err) - } else + } + } else { return resolve() + } }) }) .then(() => { diff --git a/node_modules/cacache/lib/verify.js b/node_modules/cacache/lib/verify.js index e9d679eceaf51..300cd9f9de1c4 100644 --- a/node_modules/cacache/lib/verify.js +++ b/node_modules/cacache/lib/verify.js @@ -54,8 +54,9 @@ function verify (cache, opts) { stats[k] = s[k] }) const end = new Date() - if (!stats.runTime) + if (!stats.runTime) { stats.runTime = {} + } stats.runTime[label] = end - start return Promise.resolve(stats) @@ -108,8 +109,9 @@ function garbageCollect (cache, opts) { const indexStream = index.lsStream(cache) const liveContent = new Set() indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) + if (opts.filter && !opts.filter(entry)) { return + } liveContent.add(entry.integrity.toString()) }) @@ -176,8 +178,9 @@ function verifyContent (filepath, sri) { return ssri .checkStream(new fsm.ReadStream(filepath), sri) .catch((err) => { - if (err.code !== 'EINTEGRITY') + if (err.code !== 'EINTEGRITY') { throw err + } return rimraf(filepath).then(() => { contentInfo.valid = false @@ -186,8 +189,9 @@ function verifyContent (filepath, sri) { .then(() => contentInfo) }) .catch((err) => { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { return { size: 0, valid: false } + } throw err }) @@ -209,9 +213,9 @@ function rebuildIndex (cache, opts) { const entry = entries[k] const excluded = opts.filter && !opts.filter(entry) excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) + if (buckets[hashed] && !excluded) { buckets[hashed].push(entry) - else if (buckets[hashed] && excluded) { + } else if (buckets[hashed] && excluded) { // skip } else if (excluded) { buckets[hashed] = [] diff --git a/node_modules/cacache/ls.js b/node_modules/cacache/ls.js deleted file mode 100644 index 6006c99e34852..0000000000000 --- a/node_modules/cacache/ls.js +++ /dev/null @@ -1,6 +0,0 @@ -'use strict' - -const index = 
require('./lib/entry-index') - -module.exports = index.ls -module.exports.stream = index.lsStream diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index 6cb4140159af8..b9efa92d9f3e0 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,14 +1,14 @@ { "name": "cacache", - "version": "15.3.0", + "version": "16.0.0", "cache-version": { "content": "2", "index": "5" }, "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "index.js", + "main": "lib/index.js", "files": [ - "*.js", + "bin", "lib" ], "scripts": { @@ -20,10 +20,13 @@ "snap": "tap", "coverage": "tap", "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "npm run npmclilint -- \"*.*js\" \"lib/**/*.*js\" \"test/**/*.*js\"", + "lint": "eslint '**/*.js'", "npmclilint": "npmcli-lint", "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --" + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "posttest": "npm run lint" }, "repository": "https://github.com/npm/cacache", "keywords": [ @@ -44,26 +47,26 @@ "license": "ISC", "dependencies": { "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.0.1", + "@npmcli/move-file": "^1.1.2", "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", + "fs-minipass": "^2.1.0", "glob": "^7.1.4", "infer-owner": "^1.0.4", "lru-cache": "^6.0.0", "minipass": "^3.1.1", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.2", - "mkdirp": "^1.0.3", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", "p-map": "^4.0.0", "promise-inflight": "^1.0.1", "rimraf": "^3.0.2", "ssri": "^8.0.1", - "tar": "^6.0.2", + "tar": "^6.1.11", "unique-filename": "^1.1.1" }, "devDependencies": { - "@npmcli/lint": "^1.0.1", + "@npmcli/template-oss": "^2.9.2", "benchmark": "^2.1.4", "chalk": "^4.0.0", "require-inject": "^1.4.4", @@ -75,6 +78,11 @@ "test-regex": "test/[^/]*.js" }, "engines": { - "node": ">= 10" - } + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "windowsCI": false, + "version": "2.9.2" + }, + "author": "GitHub Inc." } diff --git a/node_modules/cacache/put.js b/node_modules/cacache/put.js deleted file mode 100644 index 84e9562bc33ab..0000000000000 --- a/node_modules/cacache/put.js +++ /dev/null @@ -1,83 +0,0 @@ -'use strict' - -const index = require('./lib/entry-index') -const memo = require('./lib/memoization') -const write = require('./lib/content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - return write(cache, data, opts).then((res) => { - return index - .insert(cache, key, res.integrity, { ...opts, size: res.size }) - .then((entry) => { - if (memoize) - memo.put(cache, entry, data, opts) - - return res.integrity - }) - }) -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. 
- if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - flush () { - return index - .insert(cache, key, integrity, { ...opts, size }) - .then((entry) => { - if (memoize && memoData) - memo.put(cache, entry, memoData, opts) - - if (integrity) - pipeline.emit('integrity', integrity) - - if (size) - pipeline.emit('size', size) - }) - }, - })) - - return pipeline -} diff --git a/node_modules/cacache/rm.js b/node_modules/cacache/rm.js deleted file mode 100644 index f2ef6b190f457..0000000000000 --- a/node_modules/cacache/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const util = require('util') - -const index = require('./lib/entry-index') -const memo = require('./lib/memoization') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const rmContent = require('./lib/content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -function all (cache) { - memo.clearMemoized() - return rimraf(path.join(cache, '*(content-*|index-*)')) -} diff --git a/node_modules/cacache/verify.js b/node_modules/cacache/verify.js deleted file mode 100644 index db7763d7afd07..0000000000000 --- a/node_modules/cacache/verify.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = require('./lib/verify') diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 8bffb2af83cab..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,259 +0,0 @@ -'use strict' - -const util = require('util') - -const fs = require('fs') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -const lstat = util.promisify(fs.lstat) -const readFile = util.promisify(fs.readFile) - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -function read (cache, integrity, opts = {}) { - const { size } = opts - return withContentSri(cache, integrity, (cpath, sri) => { - // get size - return lstat(cpath).then(stat => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - return readFile(cpath, null).then((data) => { - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data - }) - }) -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.sync = readSync - -function readSync (cache, integrity, opts = {}) { - const { size } = opts - return withContentSriSync(cache, integrity, (cpath, sri) => { - const data = fs.readFileSync(cpath) - if (typeof size === 'number' && size !== data.length) { - throw sizeError(size, data.length) - } - - if (ssri.checkData(data, sri)) { - return data - } - - throw integrityError(sri, cpath) - }) -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - withContentSri(cache, integrity, (cpath, sri) => { - // just lstat to ensure it exists - return lstat(cpath).then((stat) => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } - - 
readPipeline(cpath, stat.size, sri, stream) - }, er => stream.emit('error', er)) - - return stream -} - -let copyFile -if (fs.copyFile) { - module.exports.copy = copy - module.exports.copy.sync = copySync - copyFile = util.promisify(fs.copyFile) -} - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath, sri) => { - return copyFile(cpath, dest) - }) -} - -function copySync (cache, integrity, dest) { - return withContentSriSync(cache, integrity, (cpath, sri) => { - return fs.copyFileSync(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -function hasContent (cache, integrity) { - if (!integrity) { - return Promise.resolve(false) - } - - return withContentSri(cache, integrity, (cpath, sri) => { - return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) - }).catch((err) => { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - }) -} - -module.exports.hasContent.sync = hasContentSync - -function hasContentSync (cache, integrity) { - if (!integrity) { - return false - } - - return withContentSriSync(cache, integrity, (cpath, sri) => { - try { - const stat = fs.lstatSync(cpath) - return { size: stat.size, sri, stat } - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } - }) -} - -function withContentSri (cache, integrity, fn) { - const tryFn = () => { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - return Promise - .all(digests.map((meta) => { - return withContentSri(cache, meta, fn) - .catch((err) => { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - }) - })) - .then((results) => { - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - }) - } - } - - return new Promise((resolve, reject) => { - try { - tryFn() - .then(resolve) - .catch(reject) - } catch (err) { - reject(err) - } - }) -} - -function withContentSriSync (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - let lastErr = null - for (const meta of digests) { - try { - return withContentSriSync(cache, meta, fn) - } catch (err) { - lastErr = err - } - } - throw lastErr - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index 50612364e9b48..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const { hasContent } = require('./read') -const rimraf = util.promisify(require('rimraf')) - -module.exports = rm - -function rm (cache, integrity) { - return hasContent(cache, integrity).then((content) => { - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - return rimraf(contentPath(cache, content.sri)).then(() => true) - } else { - return false - } - }) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js deleted file mode 100644 index a71e81ad5e150..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,194 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const fixOwner = require('../util/fix-owner') -const fs = require('fs') -const moveFile = require('../util/move-file') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const { disposer } = require('./../util/disposer') -const fsm = require('fs-minipass') - -const writeFile = util.promisify(fs.writeFile) - -module.exports = write - -function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - if (algorithms && algorithms.length > 1) { - throw new Error('opts.algorithms only supports a single algorithm for now') - } - - if (typeof size === 'number' && data.length !== size) { - return Promise.reject(sizeError(size, data.length)) - } - - const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - return Promise.reject(checksumError(integrity, sri)) - } - - return disposer(makeTmp(cache, opts), makeTmpDisposer, - (tmp) => { - return writeFile(tmp.target, data, { flag: 'wx' }) - .then(() => moveToDestination(tmp, cache, sri, opts)) - }) - .then(() => ({ integrity: sri, size: data.length })) -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. - return Promise.reject(e).catch(cb) - } - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -function handleContent (inputStream, cache, opts) { - return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { - return pipeToTmp(inputStream, cache, tmp.target, opts) - .then((res) => { - return moveToDestination( - tmp, - cache, - res.integrity, - opts - ).then(() => res) - }) - }) -} - -function pipeToTmp (inputStream, cache, tmpTarget, opts) { - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - // NB: this can throw if the hashStream has a problem with - // it, and the data is fully written. but pipeToTmp is only - // called in promisory contexts where that is handled. 
- const pipeline = new Pipeline( - inputStream, - hashStream, - outStream - ) - - return pipeline.promise() - .then(() => ({ integrity, size })) - .catch(er => rimraf(tmpTarget).then(() => { - throw er - })) -} - -function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ - target: tmpTarget, - moved: false, - })) -} - -function makeTmpDisposer (tmp) { - if (tmp.moved) { - return Promise.resolve() - } - - return rimraf(tmp.target) -} - -function moveToDestination (tmp, cache, sri, opts) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - - return fixOwner - .mkdirfix(cache, destDir) - .then(() => { - return moveFile(tmp.target, destination) - }) - .then(() => { - tmp.moved = true - return fixOwner.chownr(cache, destination) - }) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 426778b850963..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,412 +0,0 @@ -'use strict' - -const util = require('util') -const crypto = require('crypto') -const fs = require('fs') -const Minipass = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const { disposer } = require('./util/disposer') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const moveFile = require('@npmcli/move-file') -const _rimraf = require('rimraf') -const rimraf = util.promisify(_rimraf) -rimraf.sync = _rimraf.sync - -const appendFile = util.promisify(fs.appendFile) -const readFile = util.promisify(fs.readFile) -const readdir = util.promisify(fs.readdir) -const writeFile = util.promisify(fs.writeFile) - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. 
- // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(target)) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rimraf(tmp.target) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await fixOwner.mkdirfix(cache, path.dirname(bucket)) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - try { - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - } - - // write the file atomically - await disposer(setup(), teardown, write) - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -function insert (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - return fixOwner - .mkdirfix(cache, path.dirname(bucket)) - .then(() => { - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - }) - .then(() => fixOwner.chownr(cache, bucket)) - .catch((err) => { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. - // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. 
- }) - .then(() => { - return formatEntry(cache, entry) - }) -} - -module.exports.insert.sync = insertSync - -function insertSync (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) - const stringified = JSON.stringify(entry) - fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - try { - fixOwner.chownr.sync(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -function find (cache, key) { - const bucket = bucketPath(cache, key) - return bucketEntries(bucket) - .then((entries) => { - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - }) -} - -module.exports.find.sync = findSync - -function findSync (cache, key) { - const bucket = bucketPath(cache, key) - try { - return bucketEntriesSync(bucket).reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf(bucket) -} - -module.exports.delete.sync = delSync - -function delSync (cache, key, opts = {}) { - if (!opts.removeFully) { - return insertSync(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf.sync(bucket) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - readdirOrEmpty(indexDir).then(buckets => Promise.all( - buckets.map(bucket => { - const bucketPath = path.join(indexDir, bucket) - return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( - subbuckets.map(subbucket => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - return readdirOrEmpty(subbucketPath).then(entries => Promise.all( - entries.map(entry => { - const entryPath = path.join(subbucketPath, entry) - return bucketEntries(entryPath).then(entries => - // using a Map here prevents duplicate keys from - // showing up twice, I guess? 
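// A tiny illustration (entries invented) of the Map reduce this comment is
// describing: writing every entry into a Map keyed by entry.key lets later
// appends for the same key overwrite earlier ones, so lsStream() emits each
// key exactly once, with the newest entry winning.
const entries = [
  { key: 'a', time: 1 },
  { key: 'a', time: 2 }, // a newer append for the same key
  { key: 'b', time: 1 },
]
const reduced = entries.reduce((acc, entry) => acc.set(entry.key, entry), new Map())
console.log(reduced.size)          // 2
console.log(reduced.get('a').time) // 2, the newest entry for 'a'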
- entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - ).then(reduced => { - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - }).catch(err => { - if (err.code === 'ENOENT') { - return undefined - } - throw err - }) - }) - )) - }) - )) - }) - )) - .then( - () => stream.end(), - err => stream.emit('error', err) - ) - - return stream -} - -module.exports.ls = ls - -function ls (cache) { - return lsStream(cache).collect().then(entries => - entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) - ) -} - -module.exports.bucketEntries = bucketEntries - -function bucketEntries (bucket, filter) { - return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) -} - -module.exports.bucketEntries.sync = bucketEntriesSync - -function bucketEntriesSync (bucket, filter) { - const data = fs.readFileSync(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data, filter) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (e) { - // Entry is corrupted! - return - } - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js deleted file mode 100644 index d9d4bf4c6416f..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,251 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const fs = require('fs') -const util = require('util') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -const writeFile = util.promisify(fs.writeFile) - -function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve({ - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - }) - } - - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - return read(cache, entry.integrity, { integrity, size }).then((data) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) -} -module.exports = getData - -function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized) - } - - return read(cache, key, { integrity, size }).then((res) => { - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res - }) -} -module.exports.byDigest = getDataByDigest - -function getDataSync (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - const entry = index.find.sync(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = read.sync(cache, entry.integrity, { - integrity: integrity, - size: size, - }) - const res = { - metadata: entry.metadata, - data: data, - size: entry.size, - integrity: entry.integrity, - } - if (memoize) { - memo.put(cache, entry, res.data, opts) - } - - return res -} - -module.exports.sync = getDataSync - -function getDataByDigestSync (cache, digest, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, digest, opts) - - if (memoized && memoize !== false) { - return memoized - } - - const res = read.sync(cache, digest, { - integrity: integrity, - size: size, - }) - if (memoize) { - memo.put.byDigest(cache, digest, res, opts) - } - - return res -} -module.exports.sync.byDigest = getDataByDigestSync - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && 
cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - index - .find(cache, key) - .then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - }) - .catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -function copy (cache, key, dest, opts = {}) { - if (read.copy) { - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - return read.copy(cache, entry.integrity, dest, opts) - .then(() => { - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) - } - - return getData(cache, key, opts).then((res) => { - return writeFile(dest, res.data).then(() => { - return { - metadata: res.metadata, - size: res.size, - integrity: res.integrity, - } - }) - }) -} -module.exports.copy = copy - -function copyByDigest (cache, key, dest, opts = {}) { - if (read.copy) { - return read.copy(cache, key, dest, opts).then(() => key) - } - - return getDataByDigest(cache, key, opts).then((res) => { - return writeFile(dest, res).then(() => key) - }) -} -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js deleted file mode 100644 index 1c56be68dd8fd..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = 
require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.sync = get.sync -module.exports.get.sync.byDigest = get.sync.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent -module.exports.get.hasContent.sync = get.hasContent.sync - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js deleted file mode 100644 index e1b13dd5fd528..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,74 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') - -const MAX_SIZE = 50 * 1024 * 1024 // 50MB -const MAX_AGE = 3 * 60 * 1000 - -const MEMOIZED = new LRU({ - max: MAX_SIZE, - maxAge: MAX_AGE, - length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.reset() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js deleted file mode 100644 index d6904fa301272..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,87 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = 
require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - return write(cache, data, opts).then((res) => { - return index - .insert(cache, key, res.integrity, { ...opts, size: res.size }) - .then((entry) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity - }) - }) -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - flush () { - return index - .insert(cache, key, integrity, { ...opts, size }) - .then((entry) => { - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - - if (integrity) { - pipeline.emit('integrity', integrity) - } - - if (size) { - pipeline.emit('size', size) - } - }) - }, - })) - - return pipeline -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js deleted file mode 100644 index 5f00071770b8d..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const util = require('util') - -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -function all (cache) { - memo.clearMemoized() - return rimraf(path.join(cache, '*(content-*|index-*)')) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js deleted file mode 100644 index 52d7d3edda7d5..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -module.exports.disposer = disposer - -function disposer (creatorFn, disposerFn, fn) { - const runDisposer = (resource, result, shouldThrow = false) => { - return disposerFn(resource) - .then( - // disposer resolved, do something with original fn's promise - () => { - if (shouldThrow) { - throw result - } - - return result - }, - // Disposer fn failed, crash process - (err) => { - throw err - // Or process.exit? 
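// A hedged usage sketch for disposer() (caller code invented; the contract is
// taken from the signature above, assuming the disposer defined in this file
// is in scope): the first argument is a promise for a resource, the second
// releases it, and the third does the work. The release step always runs,
// whether the work resolved or threw.
const withTempResource = () =>
  disposer(
    Promise.resolve({ name: 'tmp-resource' }),                    // acquire
    (res) => Promise.resolve(console.log('released', res.name)),  // release
    (res) => Promise.resolve(`worked with ${res.name}`)           // use
  )

withTempResource().then(console.log) // logs "released tmp-resource", then the result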
- }) - } - - return creatorFn - .then((resource) => { - // fn(resource) can throw, so wrap in a promise here - return Promise.resolve().then(() => fn(resource)) - .then((result) => runDisposer(resource, result)) - .catch((err) => runDisposer(resource, err, true)) - }) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js deleted file mode 100644 index bc14def4e405c..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js +++ /dev/null @@ -1,148 +0,0 @@ -'use strict' - -const util = require('util') - -const chownr = util.promisify(require('chownr')) -const mkdirp = require('mkdirp') -const inflight = require('promise-inflight') -const inferOwner = require('infer-owner') - -// Memoize getuid()/getgid() calls. -// patch process.setuid/setgid to invalidate cached value on change -const self = { uid: null, gid: null } -const getSelf = () => { - if (typeof self.uid !== 'number') { - self.uid = process.getuid() - const setuid = process.setuid - process.setuid = (uid) => { - self.uid = null - process.setuid = setuid - return process.setuid(uid) - } - } - if (typeof self.gid !== 'number') { - self.gid = process.getgid() - const setgid = process.setgid - process.setgid = (gid) => { - self.gid = null - process.setgid = setgid - return process.setgid(gid) - } - } -} - -module.exports.chownr = fixOwner - -function fixOwner (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return Promise.resolve() - } - - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return Promise.resolve() - } - - return Promise.resolve(inferOwner(cache)).then((owner) => { - const { uid, gid } = owner - - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } - - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) - }) -} - -module.exports.chownr.sync = fixOwnerSync - -function fixOwnerSync (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - const { uid, gid } = inferOwner.sync(cache) - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - if (self.uid === uid && self.gid === gid) { - // No need to override if it's already what we used. - return - } - try { - chownr.sync( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ) - } catch (err) { - // only catch ENOENT, any other error is a problem. - if (err.code === 'ENOENT') { - return null - } - - throw err - } -} - -module.exports.mkdirfix = mkdirfix - -function mkdirfix (cache, p, cb) { - // we have to infer the owner _before_ making the directory, even though - // we aren't going to use the results, since the cache itself might not - // exist yet. If we mkdirp it, then our current uid/gid will be assumed - // to be correct if it creates the cache folder in the process. 
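// A hedged usage sketch (paths invented) for mkdirfix() as defined below: the
// owner of the cache root is inferred before mkdirp() runs, so if the mkdir
// call itself ends up creating the cache directory, the ownership fix that
// follows still has a correct answer to apply. On Windows, or when the
// process is not root, the chown half is effectively a no-op.
const fixOwner = require('./fix-owner') // assumed: required from a sibling module

fixOwner
  .mkdirfix('/path/to/cache', '/path/to/cache/content-v2/sha512')
  .then(() => console.log('created (and, if running as root, chowned)'))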
- return Promise.resolve(inferOwner(cache)).then(() => { - return mkdirp(p) - .then((made) => { - if (made) { - return fixOwner(cache, made).then(() => made) - } - }) - .catch((err) => { - if (err.code === 'EEXIST') { - return fixOwner(cache, p).then(() => null) - } - - throw err - }) - }) -} - -module.exports.mkdirfix.sync = mkdirfixSync - -function mkdirfixSync (cache, p) { - try { - inferOwner.sync(cache) - const made = mkdirp.sync(p) - if (made) { - fixOwnerSync(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - fixOwnerSync(cache, p) - return null - } else { - throw err - } - } -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js deleted file mode 100644 index 3739cea3df281..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js +++ /dev/null @@ -1,69 +0,0 @@ -'use strict' - -const fs = require('fs') -const util = require('util') -const chmod = util.promisify(fs.chmod) -const unlink = util.promisify(fs.unlink) -const stat = util.promisify(fs.stat) -const move = require('@npmcli/move-file') -const pinflight = require('promise-inflight') - -module.exports = moveFile - -function moveFile (src, dest) { - const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || - process.platform === 'win32' - - // This isn't quite an fs.rename -- the assumption is that - // if `dest` already exists, and we get certain errors while - // trying to move it, we should just not bother. - // - // In the case of cache corruption, users will receive an - // EINTEGRITY error elsewhere, and can remove the offending - // content their own way. - // - // Note that, as the name suggests, this strictly only supports file moves. - return new Promise((resolve, reject) => { - fs.link(src, dest, (err) => { - if (err) { - if (isWindows && err.code === 'EPERM') { - // XXX This is a really weird way to handle this situation, as it - // results in the src file being deleted even though the dest - // might not exist. Since we pretty much always write files to - // deterministic locations based on content hash, this is likely - // ok (or at worst, just ends in a future cache miss). But it would - // be worth investigating at some time in the future if this is - // really what we want to do here. - return resolve() - } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { - // file already exists, so whatever - return resolve() - } else { - return reject(err) - } - } else { - return resolve() - } - }) - }) - .then(() => { - // content should never change for any reason, so make it read-only - return Promise.all([ - unlink(src), - !isWindows && chmod(dest, '0444'), - ]) - }) - .catch(() => { - return pinflight('cacache-move-file:' + dest, () => { - return stat(dest).catch((err) => { - if (err.code !== 'ENOENT') { - // Something else is wrong here. Bail bail bail - throw err - } - // file doesn't already exist! 
let's try a rename -> copy fallback - // only delete if it successfully copies - return move(src, dest) - }) - }) - }) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0a5a50eba3061..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') - -const fixOwner = require('./fix-owner') -const path = require('path') - -module.exports.mkdir = mktmpdir - -function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - .then(() => { - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) - }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) -} - -module.exports.fix = fixtmpdir - -function fixtmpdir (cache) { - return fixOwner(cache, path.join(cache, 'tmp')) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js deleted file mode 100644 index 300cd9f9de1c4..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,291 +0,0 @@ -'use strict' - -const util = require('util') - -const pMap = require('p-map') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const fs = require('fs') -const fsm = require('fs-minipass') -const glob = util.promisify(require('glob')) -const index = require('./entry-index') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const stat = util.promisify(fs.stat) -const truncate = util.promisify(fs.truncate) -const writeFile = util.promisify(fs.writeFile) -const readFile = util.promisify(fs.readFile) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - return steps - .reduce((promise, step, i) => { - const label = step.name - const start = new Date() - return promise.then((stats) => { - return step(cache, opts).then((s) => { - s && - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - - stats.runTime[label] = end - start - return Promise.resolve(stats) - }) - }) - }, Promise.resolve({})) - .then((stats) => { - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats - }) -} - -function markStartTime (cache, opts) { - return Promise.resolve({ startTime: new Date() }) -} - -function markEndTime (cache, opts) { - return Promise.resolve({ endTime: new Date() }) -} - -function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing 
cache permissions') - return fixOwner - .mkdirfix(cache, cache) - .then(() => { - // TODO - fix file permissions too - return fixOwner.chownr(cache, cache) - }) - .then(() => null) -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rimraf it. -// -function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - liveContent.add(entry.integrity.toString()) - }) - return new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }).then(() => { - const contentDir = contentPath.contentDir(cache) - return glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }).then((files) => { - return Promise.resolve({ - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - }).then((stats) => - pMap( - files, - (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - return verifyContent(f, integrity).then((info) => { - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - return stats - }) - } else { - // No entries refer to this content. We can delete. 
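// An illustration of the sweep step above, using the sample path layout from
// contentPath()'s own doc comment (shortened digest; real sha512 digests are
// 128 hex chars): the last three path segments rejoin into the hex digest and
// the fourth-from-last names the algorithm, so every content file maps back
// to an ssri Integrity for the liveness check.
const ssri = require('ssri')

const f = '/cache/content-v2/sha512/ba/da/55deadbeefc0ffee'
const split = f.split(/[/\\]/)
const digest = split.slice(split.length - 3).join('') // 'bada55deadbeefc0ffee'
const algo = split[split.length - 4]                  // 'sha512'
console.log(ssri.fromHex(digest, algo).toString())    // 'sha512-<base64 of those bytes>'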
- stats.reclaimedCount++ - return stat(f).then((s) => { - return rimraf(f).then(() => { - stats.reclaimedSize += s.size - return stats - }) - }) - } - }, - { concurrency: opts.concurrency } - ).then(() => stats) - ) - }) - }) -} - -function verifyContent (filepath, sri) { - return stat(filepath) - .then((s) => { - const contentInfo = { - size: s.size, - valid: true, - } - return ssri - .checkStream(new fsm.ReadStream(filepath), sri) - .catch((err) => { - if (err.code !== 'EINTEGRITY') { - throw err - } - - return rimraf(filepath).then(() => { - contentInfo.valid = false - }) - }) - .then(() => contentInfo) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - - throw err - }) -} - -function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - return index.ls(cache).then((entries) => { - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - return pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ).then(() => stats) - }) -} - -function rebuildBucket (cache, bucket, stats, opts) { - return truncate(bucket._path).then(() => { - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. - return bucket.reduce((promise, entry) => { - return promise.then(() => { - const content = contentPath(cache, entry.integrity) - return stat(content) - .then(() => { - return index - .insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - }) - .then(() => { - stats.totalEntries++ - }) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - return - } - throw err - }) - }) - }, Promise.resolve()) - }) -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rimraf(path.join(cache, 'tmp')) -} - -function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - try { - return writeFile(verifile, '' + +new Date()) - } finally { - fixOwner.chownr.sync(cache, verifile) - } -} - -module.exports.lastRun = lastRun - -function lastRun (cache) { - return readFile(path.join(cache, '_lastverified'), 'utf8').then( - (data) => new Date(+data) - ) -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. 
Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js deleted file mode 100644 index 573b6b85b9779..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js +++ /dev/null @@ -1,334 +0,0 @@ -'use strict' - -// A linked list to keep track of recently-used-ness -const Yallist = require('yallist') - -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') - -const naiveLength = () => 1 - -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. -// -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. -class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } - - if (!options) - options = {} - - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() - } - - // resize the cache when the max changes. - set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') - - this[MAX] = mL || Infinity - trim(this) - } - get max () { - return this[MAX] - } - - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale - } - get allowStale () { - return this[ALLOW_STALE] - } - - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') - - this[MAX_AGE] = mA - trim(this) - } - get maxAge () { - return this[MAX_AGE] - } - - // resize the cache when the lengthCalculator changes. 
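// A usage sketch of this lru-cache (v6) API configured the way cacache's
// memoization layer uses it (numbers invented): once a length calculator is
// supplied, max acts as a total "length" budget rather than an item count,
// so one large cached buffer can evict many small ones.
const LRU = require('lru-cache') // assumed installed as a dependency

const cache = new LRU({
  max: 1024,                       // total length budget, not an item count
  maxAge: 60 * 1000,               // entries go stale after a minute
  length: (value) => value.length, // each entry's contribution to the budget
})

cache.set('a', 'x'.repeat(600))
cache.set('b', 'y'.repeat(600))             // 1200 > 1024, so 'a' is evicted
console.log(cache.has('a'), cache.has('b')) // false true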
- set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength - - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) - } - trim(this) - } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } - - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev - } - } - - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next - } - } - - keys () { - return this[LRU_LIST].toArray().map(k => k.key) - } - - values () { - return this[LRU_LIST].toArray().map(k => k.value) - } - - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) - } - - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list - } - - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) - } - - dumpLru () { - return this[LRU_LIST] - } - - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] - - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') - - const now = maxAge ? Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) - - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } - - const node = this[CACHE].get(key) - const item = node.value - - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) - } - - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true - } - - const hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. 
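// Concretely (lru-cache@6 assumed installed): a single value whose computed
// length exceeds max is rejected outright; set() returns false and nothing
// is stored or evicted to make room for it.
const LRU = require('lru-cache')
const small = new LRU({ max: 10, length: (v) => v.length })
console.log(small.set('big', 'x'.repeat(11))) // false: over budget on its own
console.log(small.has('big'))                 // false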
- if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) - - return false - } - - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true - } - - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) - } - - get (key) { - return get(this, key, true) - } - - peek (key) { - return get(this, key, false) - } - - pop () { - const node = this[LRU_LIST].tail - if (!node) - return null - - del(this, node) - return node.value - } - - del (key) { - del(this, this[CACHE].get(key)) - } - - load (arr) { - // reset the cache - this.reset() - - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) - } - } - } - } - - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) - } -} - -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) - } - } - return hit.value - } -} - -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false - - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} - -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. - const prev = walker.prev - del(self, walker) - walker = prev - } - } -} - -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) - - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) - } -} - -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 - } -} - -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined - } - if (hit) - fn.call(thisp, hit.value, hit.key, self) -} - -module.exports = LRUCache diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json deleted file mode 100644 index 43b7502c3e7c7..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "6.0.0", - "author": "Isaac Z. 
Schlueter ", - "keywords": [ - "mru", - "lru", - "cache" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "main": "index.js", - "repository": "git://github.com/isaacs/node-lru-cache.git", - "devDependencies": { - "benchmark": "^2.1.4", - "tap": "^14.10.7" - }, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "files": [ - "index.js" - ], - "engines": { - "node": ">=10" - } -} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/package.json deleted file mode 100644 index b9efa92d9f3e0..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/cacache/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "name": "cacache", - "version": "16.0.0", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin", - "lib" - ], - "scripts": { - "benchmarks": "node test/benchmarks", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint '**/*.js'", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "npm-template-check", - "template-copy": "npm-template-copy --force", - "posttest": "npm run lint" - }, - "repository": "https://github.com/npm/cacache", - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - }, - "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "benchmark": "^2.1.4", - "chalk": "^4.0.0", - "require-inject": "^1.4.4", - "tacks": "^1.3.0", - "tap": "^15.0.9" - }, - "tap": { - "100": true, - "test-regex": "test/[^/]*.js" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - }, - "templateOSS": { - "windowsCI": false, - "version": "2.9.2" - }, - "author": "GitHub Inc." -} diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/pacote/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/pacote/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 8bffb2af83cab..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,259 +0,0 @@ -'use strict' - -const util = require('util') - -const fs = require('fs') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -const lstat = util.promisify(fs.lstat) -const readFile = util.promisify(fs.readFile) - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -function read (cache, integrity, opts = {}) { - const { size } = opts - return withContentSri(cache, integrity, (cpath, sri) => { - // get size - return lstat(cpath).then(stat => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - return readFile(cpath, null).then((data) => { - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data - }) - }) -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.sync = readSync - -function readSync (cache, integrity, opts = {}) { - const { size } = opts - return withContentSriSync(cache, integrity, (cpath, sri) => { - const data = fs.readFileSync(cpath) - if (typeof size === 'number' && size !== data.length) { - throw sizeError(size, data.length) - } - - if (ssri.checkData(data, sri)) { - return data - } - - throw integrityError(sri, cpath) - }) -} - -module.exports.stream = readStream 
-module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - withContentSri(cache, integrity, (cpath, sri) => { - // just lstat to ensure it exists - return lstat(cpath).then((stat) => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } - - readPipeline(cpath, stat.size, sri, stream) - }, er => stream.emit('error', er)) - - return stream -} - -let copyFile -if (fs.copyFile) { - module.exports.copy = copy - module.exports.copy.sync = copySync - copyFile = util.promisify(fs.copyFile) -} - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath, sri) => { - return copyFile(cpath, dest) - }) -} - -function copySync (cache, integrity, dest) { - return withContentSriSync(cache, integrity, (cpath, sri) => { - return fs.copyFileSync(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -function hasContent (cache, integrity) { - if (!integrity) { - return Promise.resolve(false) - } - - return withContentSri(cache, integrity, (cpath, sri) => { - return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) - }).catch((err) => { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - }) -} - -module.exports.hasContent.sync = hasContentSync - -function hasContentSync (cache, integrity) { - if (!integrity) { - return false - } - - return withContentSriSync(cache, integrity, (cpath, sri) => { - try { - const stat = fs.lstatSync(cpath) - return { size: stat.size, sri, stat } - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } - }) -} - -function withContentSri (cache, integrity, fn) { - const tryFn = () => { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
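// A sketch of the Integrity shape withContentSri() handles here (input data
// invented): one parsed integrity value can carry digests under several
// algorithms, and pickAlgorithm() chooses the strongest one available, which
// then determines the candidate content paths.
const ssri = require('ssri')

const sri = ssri.fromData('hello', { algorithms: ['sha1', 'sha512'] })
const algo = sri.pickAlgorithm()  // 'sha512', the stronger of the two
const digests = sri[algo]         // array of Hash objects for that algorithm
console.log(algo, digests.length) // sha512 1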
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - return Promise - .all(digests.map((meta) => { - return withContentSri(cache, meta, fn) - .catch((err) => { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - }) - })) - .then((results) => { - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - }) - } - } - - return new Promise((resolve, reject) => { - try { - tryFn() - .then(resolve) - .catch(reject) - } catch (err) { - reject(err) - } - }) -} - -function withContentSriSync (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - let lastErr = null - for (const meta of digests) { - try { - return withContentSriSync(cache, meta, fn) - } catch (err) { - lastErr = err - } - } - throw lastErr - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index 50612364e9b48..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const { hasContent } = require('./read') -const rimraf = util.promisify(require('rimraf')) - -module.exports = rm - -function rm (cache, integrity) { - return hasContent(cache, integrity).then((content) => { - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - return rimraf(contentPath(cache, content.sri)).then(() => true) - } else { - return false - } - }) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js deleted file mode 100644 index a71e81ad5e150..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,194 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const fixOwner = require('../util/fix-owner') -const fs = require('fs') -const moveFile = require('../util/move-file') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const 
Flush = require('minipass-flush') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const { disposer } = require('./../util/disposer') -const fsm = require('fs-minipass') - -const writeFile = util.promisify(fs.writeFile) - -module.exports = write - -function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - if (algorithms && algorithms.length > 1) { - throw new Error('opts.algorithms only supports a single algorithm for now') - } - - if (typeof size === 'number' && data.length !== size) { - return Promise.reject(sizeError(size, data.length)) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - return Promise.reject(checksumError(integrity, sri)) - } - - return disposer(makeTmp(cache, opts), makeTmpDisposer, - (tmp) => { - return writeFile(tmp.target, data, { flag: 'wx' }) - .then(() => moveToDestination(tmp, cache, sri, opts)) - }) - .then(() => ({ integrity: sri, size: data.length })) -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. - return Promise.reject(e).catch(cb) - } - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -function handleContent (inputStream, cache, opts) { - return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { - return pipeToTmp(inputStream, cache, tmp.target, opts) - .then((res) => { - return moveToDestination( - tmp, - cache, - res.integrity, - opts - ).then(() => res) - }) - }) -} - -function pipeToTmp (inputStream, cache, tmpTarget, opts) { - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - // NB: this can throw if the hashStream has a problem with - // it, and the data is fully written. but pipeToTmp is only - // called in promisory contexts where that is handled. 
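- // The pipeline below streams the input through the integrity check
- // and into the temp file in a single pass; if any stage fails,
- // pipeline.promise() rejects and the catch handler removes the
- // partially written temp file before rethrowing.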
- const pipeline = new Pipeline( - inputStream, - hashStream, - outStream - ) - - return pipeline.promise() - .then(() => ({ integrity, size })) - .catch(er => rimraf(tmpTarget).then(() => { - throw er - })) -} - -function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ - target: tmpTarget, - moved: false, - })) -} - -function makeTmpDisposer (tmp) { - if (tmp.moved) { - return Promise.resolve() - } - - return rimraf(tmp.target) -} - -function moveToDestination (tmp, cache, sri, opts) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - - return fixOwner - .mkdirfix(cache, destDir) - .then(() => { - return moveFile(tmp.target, destination) - }) - .then(() => { - tmp.moved = true - return fixOwner.chownr(cache, destination) - }) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 426778b850963..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,412 +0,0 @@ -'use strict' - -const util = require('util') -const crypto = require('crypto') -const fs = require('fs') -const Minipass = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const { disposer } = require('./util/disposer') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const moveFile = require('@npmcli/move-file') -const _rimraf = require('rimraf') -const rimraf = util.promisify(_rimraf) -rimraf.sync = _rimraf.sync - -const appendFile = util.promisify(fs.appendFile) -const readFile = util.promisify(fs.readFile) -const readdir = util.promisify(fs.readdir) -const writeFile = util.promisify(fs.writeFile) - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. 
- // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(target)) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rimraf(tmp.target) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await fixOwner.mkdirfix(cache, path.dirname(bucket)) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - try { - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - } - - // write the file atomically - await disposer(setup(), teardown, write) - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -function insert (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - return fixOwner - .mkdirfix(cache, path.dirname(bucket)) - .then(() => { - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - }) - .then(() => fixOwner.chownr(cache, bucket)) - .catch((err) => { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. - // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. 
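- // (either way, callers still receive a well-formed entry from the
- // formatEntry call below)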
- }) - .then(() => { - return formatEntry(cache, entry) - }) -} - -module.exports.insert.sync = insertSync - -function insertSync (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) - const stringified = JSON.stringify(entry) - fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - try { - fixOwner.chownr.sync(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -function find (cache, key) { - const bucket = bucketPath(cache, key) - return bucketEntries(bucket) - .then((entries) => { - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - }) -} - -module.exports.find.sync = findSync - -function findSync (cache, key) { - const bucket = bucketPath(cache, key) - try { - return bucketEntriesSync(bucket).reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf(bucket) -} - -module.exports.delete.sync = delSync - -function delSync (cache, key, opts = {}) { - if (!opts.removeFully) { - return insertSync(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf.sync(bucket) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - readdirOrEmpty(indexDir).then(buckets => Promise.all( - buckets.map(bucket => { - const bucketPath = path.join(indexDir, bucket) - return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( - subbuckets.map(subbucket => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - return readdirOrEmpty(subbucketPath).then(entries => Promise.all( - entries.map(entry => { - const entryPath = path.join(subbucketPath, entry) - return bucketEntries(entryPath).then(entries => - // using a Map here prevents duplicate keys from - // showing up twice, I guess? 
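- // (entries are read oldest-first, so a later Map.set for the same
- // key overwrites the earlier one and only the newest entry survives)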
- entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - ).then(reduced => { - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - }).catch(err => { - if (err.code === 'ENOENT') { - return undefined - } - throw err - }) - }) - )) - }) - )) - }) - )) - .then( - () => stream.end(), - err => stream.emit('error', err) - ) - - return stream -} - -module.exports.ls = ls - -function ls (cache) { - return lsStream(cache).collect().then(entries => - entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) - ) -} - -module.exports.bucketEntries = bucketEntries - -function bucketEntries (bucket, filter) { - return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) -} - -module.exports.bucketEntries.sync = bucketEntriesSync - -function bucketEntriesSync (bucket, filter) { - const data = fs.readFileSync(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data, filter) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (e) { - // Entry is corrupted! - return - } - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js deleted file mode 100644 index d9d4bf4c6416f..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,251 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const fs = require('fs') -const util = require('util') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -const writeFile = util.promisify(fs.writeFile) - -function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve({ - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - }) - } - - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - return read(cache, entry.integrity, { integrity, size }).then((data) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) -} -module.exports = getData - -function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized) - } - - return read(cache, key, { integrity, size }).then((res) => { - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res - }) -} -module.exports.byDigest = getDataByDigest - -function getDataSync (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - const entry = index.find.sync(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = read.sync(cache, entry.integrity, { - integrity: integrity, - size: size, - }) - const res = { - metadata: entry.metadata, - data: data, - size: entry.size, - integrity: entry.integrity, - } - if (memoize) { - memo.put(cache, entry, res.data, opts) - } - - return res -} - -module.exports.sync = getDataSync - -function getDataByDigestSync (cache, digest, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, digest, opts) - - if (memoized && memoize !== false) { - return memoized - } - - const res = read.sync(cache, digest, { - integrity: integrity, - size: size, - }) - if (memoize) { - memo.put.byDigest(cache, digest, res, opts) - } - - return res -} -module.exports.sync.byDigest = getDataByDigestSync - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 
'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - index - .find(cache, key) - .then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - }) - .catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -function copy (cache, key, dest, opts = {}) { - if (read.copy) { - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - return read.copy(cache, entry.integrity, dest, opts) - .then(() => { - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) - } - - return getData(cache, key, opts).then((res) => { - return writeFile(dest, res.data).then(() => { - return { - metadata: res.metadata, - size: res.size, - integrity: res.integrity, - } - }) - }) -} -module.exports.copy = copy - -function copyByDigest (cache, key, dest, opts = {}) { - if (read.copy) { - return read.copy(cache, key, dest, opts).then(() => key) - } - - return getDataByDigest(cache, key, opts).then((res) => { - return writeFile(dest, res).then(() => key) - }) -} -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js deleted file mode 100644 index 1c56be68dd8fd..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - 
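-// Rough usage sketch of the API assembled below (illustrative only;
-// `cache` is a directory path, the key is whatever the caller picks):
-//
-//   const cacache = require('cacache')
-//   cacache.put(cache, 'my-key', data)
-//     .then(() => cacache.get(cache, 'my-key'))
-//     .then(({ data, metadata, integrity }) => { /* use it */ })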
-module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.sync = get.sync -module.exports.get.sync.byDigest = get.sync.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent -module.exports.get.hasContent.sync = get.hasContent.sync - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js deleted file mode 100644 index e1b13dd5fd528..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,74 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') - -const MAX_SIZE = 50 * 1024 * 1024 // 50MB -const MAX_AGE = 3 * 60 * 1000 - -const MEMOIZED = new LRU({ - max: MAX_SIZE, - maxAge: MAX_AGE, - length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.reset() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js deleted file mode 100644 index d6904fa301272..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,87 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - 
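- // sha512 is only the default; anything in opts (spread next) wins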
...opts, -}) - -module.exports = putData - -function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - return write(cache, data, opts).then((res) => { - return index - .insert(cache, key, res.integrity, { ...opts, size: res.size }) - .then((entry) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity - }) - }) -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - flush () { - return index - .insert(cache, key, integrity, { ...opts, size }) - .then((entry) => { - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - - if (integrity) { - pipeline.emit('integrity', integrity) - } - - if (size) { - pipeline.emit('size', size) - } - }) - }, - })) - - return pipeline -} diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js deleted file mode 100644 index 5f00071770b8d..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const util = require('util') - -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -function all (cache) { - memo.clearMemoized() - return rimraf(path.join(cache, '*(content-*|index-*)')) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/disposer.js b/node_modules/pacote/node_modules/cacache/lib/util/disposer.js deleted file mode 100644 index 52d7d3edda7d5..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/disposer.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -module.exports.disposer = disposer - -function disposer (creatorFn, disposerFn, fn) { - const runDisposer = (resource, result, shouldThrow = false) => { - return disposerFn(resource) - .then( - // disposer resolved, do something with original fn's promise - () => { - if (shouldThrow) { - throw result - } - - return result - }, - // Disposer fn failed, crash process - (err) => { - throw err - // Or process.exit? 
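- // (note that a failed disposer masks fn's original result or error)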
- }) - } - - return creatorFn - .then((resource) => { - // fn(resource) can throw, so wrap in a promise here - return Promise.resolve().then(() => fn(resource)) - .then((result) => runDisposer(resource, result)) - .catch((err) => runDisposer(resource, err, true)) - }) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js b/node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js deleted file mode 100644 index bc14def4e405c..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/fix-owner.js +++ /dev/null @@ -1,148 +0,0 @@ -'use strict' - -const util = require('util') - -const chownr = util.promisify(require('chownr')) -const mkdirp = require('mkdirp') -const inflight = require('promise-inflight') -const inferOwner = require('infer-owner') - -// Memoize getuid()/getgid() calls. -// patch process.setuid/setgid to invalidate cached value on change -const self = { uid: null, gid: null } -const getSelf = () => { - if (typeof self.uid !== 'number') { - self.uid = process.getuid() - const setuid = process.setuid - process.setuid = (uid) => { - self.uid = null - process.setuid = setuid - return process.setuid(uid) - } - } - if (typeof self.gid !== 'number') { - self.gid = process.getgid() - const setgid = process.setgid - process.setgid = (gid) => { - self.gid = null - process.setgid = setgid - return process.setgid(gid) - } - } -} - -module.exports.chownr = fixOwner - -function fixOwner (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return Promise.resolve() - } - - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return Promise.resolve() - } - - return Promise.resolve(inferOwner(cache)).then((owner) => { - const { uid, gid } = owner - - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } - - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) - }) -} - -module.exports.chownr.sync = fixOwnerSync - -function fixOwnerSync (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - const { uid, gid } = inferOwner.sync(cache) - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - if (self.uid === uid && self.gid === gid) { - // No need to override if it's already what we used. - return - } - try { - chownr.sync( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ) - } catch (err) { - // only catch ENOENT, any other error is a problem. - if (err.code === 'ENOENT') { - return null - } - - throw err - } -} - -module.exports.mkdirfix = mkdirfix - -function mkdirfix (cache, p, cb) { - // we have to infer the owner _before_ making the directory, even though - // we aren't going to use the results, since the cache itself might not - // exist yet. If we mkdirp it, then our current uid/gid will be assumed - // to be correct if it creates the cache folder in the process. 
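- // (infer-owner caches its lookups, so this extra call should be cheap)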
- return Promise.resolve(inferOwner(cache)).then(() => { - return mkdirp(p) - .then((made) => { - if (made) { - return fixOwner(cache, made).then(() => made) - } - }) - .catch((err) => { - if (err.code === 'EEXIST') { - return fixOwner(cache, p).then(() => null) - } - - throw err - }) - }) -} - -module.exports.mkdirfix.sync = mkdirfixSync - -function mkdirfixSync (cache, p) { - try { - inferOwner.sync(cache) - const made = mkdirp.sync(p) - if (made) { - fixOwnerSync(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - fixOwnerSync(cache, p) - return null - } else { - throw err - } - } -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/move-file.js b/node_modules/pacote/node_modules/cacache/lib/util/move-file.js deleted file mode 100644 index 3739cea3df281..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/move-file.js +++ /dev/null @@ -1,69 +0,0 @@ -'use strict' - -const fs = require('fs') -const util = require('util') -const chmod = util.promisify(fs.chmod) -const unlink = util.promisify(fs.unlink) -const stat = util.promisify(fs.stat) -const move = require('@npmcli/move-file') -const pinflight = require('promise-inflight') - -module.exports = moveFile - -function moveFile (src, dest) { - const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || - process.platform === 'win32' - - // This isn't quite an fs.rename -- the assumption is that - // if `dest` already exists, and we get certain errors while - // trying to move it, we should just not bother. - // - // In the case of cache corruption, users will receive an - // EINTEGRITY error elsewhere, and can remove the offending - // content their own way. - // - // Note that, as the name suggests, this strictly only supports file moves. - return new Promise((resolve, reject) => { - fs.link(src, dest, (err) => { - if (err) { - if (isWindows && err.code === 'EPERM') { - // XXX This is a really weird way to handle this situation, as it - // results in the src file being deleted even though the dest - // might not exist. Since we pretty much always write files to - // deterministic locations based on content hash, this is likely - // ok (or at worst, just ends in a future cache miss). But it would - // be worth investigating at some time in the future if this is - // really what we want to do here. - return resolve() - } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { - // file already exists, so whatever - return resolve() - } else { - return reject(err) - } - } else { - return resolve() - } - }) - }) - .then(() => { - // content should never change for any reason, so make it read-only - return Promise.all([ - unlink(src), - !isWindows && chmod(dest, '0444'), - ]) - }) - .catch(() => { - return pinflight('cacache-move-file:' + dest, () => { - return stat(dest).catch((err) => { - if (err.code !== 'ENOENT') { - // Something else is wrong here. Bail bail bail - throw err - } - // file doesn't already exist! 
let's try a rename -> copy fallback - // only delete if it successfully copies - return move(src, dest) - }) - }) - }) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0a5a50eba3061..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') - -const fixOwner = require('./fix-owner') -const path = require('path') - -module.exports.mkdir = mktmpdir - -function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - .then(() => { - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) - }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) -} - -module.exports.fix = fixtmpdir - -function fixtmpdir (cache) { - return fixOwner(cache, path.join(cache, 'tmp')) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js deleted file mode 100644 index 300cd9f9de1c4..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,291 +0,0 @@ -'use strict' - -const util = require('util') - -const pMap = require('p-map') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const fs = require('fs') -const fsm = require('fs-minipass') -const glob = util.promisify(require('glob')) -const index = require('./entry-index') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const stat = util.promisify(fs.stat) -const truncate = util.promisify(fs.truncate) -const writeFile = util.promisify(fs.writeFile) -const readFile = util.promisify(fs.readFile) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - return steps - .reduce((promise, step, i) => { - const label = step.name - const start = new Date() - return promise.then((stats) => { - return step(cache, opts).then((s) => { - s && - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - - stats.runTime[label] = end - start - return Promise.resolve(stats) - }) - }) - }, Promise.resolve({})) - .then((stats) => { - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats - }) -} - -function markStartTime (cache, opts) { - return Promise.resolve({ startTime: new Date() }) -} - -function markEndTime (cache, opts) { - return Promise.resolve({ endTime: new Date() }) -} - -function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - return fixOwner - .mkdirfix(cache, cache) - 
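- // mkdirfix creates the cache root if needed and fixes its ownership;
- // the recursive chownr pass below then covers the rest of the tree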
.then(() => { - // TODO - fix file permissions too - return fixOwner.chownr(cache, cache) - }) - .then(() => null) -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rimraf it. -// -function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - liveContent.add(entry.integrity.toString()) - }) - return new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }).then(() => { - const contentDir = contentPath.contentDir(cache) - return glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }).then((files) => { - return Promise.resolve({ - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - }).then((stats) => - pMap( - files, - (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - return verifyContent(f, integrity).then((info) => { - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - return stats - }) - } else { - // No entries refer to this content. We can delete. 
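- // (this is the sweep half of the mark-and-sweep described above)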
- stats.reclaimedCount++ - return stat(f).then((s) => { - return rimraf(f).then(() => { - stats.reclaimedSize += s.size - return stats - }) - }) - } - }, - { concurrency: opts.concurrency } - ).then(() => stats) - ) - }) - }) -} - -function verifyContent (filepath, sri) { - return stat(filepath) - .then((s) => { - const contentInfo = { - size: s.size, - valid: true, - } - return ssri - .checkStream(new fsm.ReadStream(filepath), sri) - .catch((err) => { - if (err.code !== 'EINTEGRITY') { - throw err - } - - return rimraf(filepath).then(() => { - contentInfo.valid = false - }) - }) - .then(() => contentInfo) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - - throw err - }) -} - -function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - return index.ls(cache).then((entries) => { - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - return pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ).then(() => stats) - }) -} - -function rebuildBucket (cache, bucket, stats, opts) { - return truncate(bucket._path).then(() => { - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
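- // the reduce below chains each insert onto the previous promise, so
- // entries are re-appended to the bucket strictly in original order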
- return bucket.reduce((promise, entry) => { - return promise.then(() => { - const content = contentPath(cache, entry.integrity) - return stat(content) - .then(() => { - return index - .insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - }) - .then(() => { - stats.totalEntries++ - }) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - return - } - throw err - }) - }) - }, Promise.resolve()) - }) -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rimraf(path.join(cache, 'tmp')) -} - -function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - try { - return writeFile(verifile, '' + +new Date()) - } finally { - fixOwner.chownr.sync(cache, verifile) - } -} - -module.exports.lastRun = lastRun - -function lastRun (cache) { - return readFile(path.join(cache, '_lastverified'), 'utf8').then( - (data) => new Date(+data) - ) -} diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/pacote/node_modules/cacache/package.json deleted file mode 100644 index b9efa92d9f3e0..0000000000000 --- a/node_modules/pacote/node_modules/cacache/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "name": "cacache", - "version": "16.0.0", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin", - "lib" - ], - "scripts": { - "benchmarks": "node test/benchmarks", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint '**/*.js'", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "npm-template-check", - "template-copy": "npm-template-copy --force", - "posttest": "npm run lint" - }, - "repository": "https://github.com/npm/cacache", - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - }, - "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "benchmark": "^2.1.4", - "chalk": "^4.0.0", - "require-inject": "^1.4.4", - "tacks": "^1.3.0", - "tap": "^15.0.9" - }, - "tap": { - "100": true, - "test-regex": "test/[^/]*.js" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - }, - "templateOSS": { - "windowsCI": false, - "version": "2.9.2" - }, - "author": "GitHub Inc." 
-} diff --git a/package-lock.json b/package-lock.json index fec5f4c6c13cc..dff028a83a400 100644 --- a/package-lock.json +++ b/package-lock.json @@ -97,7 +97,7 @@ "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "archy": "~1.0.0", - "cacache": "^15.3.0", + "cacache": "^16.0.0", "chalk": "^4.1.2", "chownr": "^2.0.0", "cli-columns": "^4.0.0", @@ -972,34 +972,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", - "dependencies": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, "node_modules/@npmcli/move-file": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", @@ -1641,32 +1613,32 @@ "inBundle": true }, "node_modules/cacache": { - "version": "15.3.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", - "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", "inBundle": true, "dependencies": { "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.0.1", + "@npmcli/move-file": "^1.1.2", "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", + "fs-minipass": "^2.1.0", "glob": "^7.1.4", "infer-owner": "^1.0.4", "lru-cache": "^6.0.0", "minipass": "^3.1.1", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.2", - "mkdirp": "^1.0.3", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", "p-map": "^4.0.0", "promise-inflight": "^1.0.1", "rimraf": "^3.0.2", "ssri": "^8.0.1", - "tar": "^6.0.2", + "tar": "^6.1.11", "unique-filename": "^1.1.1" }, "engines": { - "node": ">= 10" + "node": "^12.13.0 || ^14.15.0 || >=16" } }, "node_modules/caching-transform": { @@ -5040,47 +5012,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/make-fetch-happen/node_modules/cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", - "inBundle": true, - "dependencies": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, - 
"node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "inBundle": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/make-fetch-happen/node_modules/lru-cache": { "version": "7.5.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", @@ -6032,35 +5963,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/pacote/node_modules/cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", - "inBundle": true, - "dependencies": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -10492,7 +10394,7 @@ "@npmcli/package-json": "^1.0.1", "@npmcli/run-script": "^3.0.0", "bin-links": "^3.0.0", - "cacache": "^15.0.3", + "cacache": "^16.0.0", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", "json-stringify-nice": "^1.1.4", @@ -11439,7 +11341,7 @@ "@npmcli/template-oss": "^2.4.2", "benchmark": "^2.1.4", "bin-links": "^3.0.0", - "cacache": "^15.0.3", + "cacache": "16.0.0", "chalk": "^4.1.0", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", @@ -11586,33 +11488,6 @@ "json-parse-even-better-errors": "^2.3.1", "pacote": "^13.0.3", "semver": "^7.3.5" - }, - "dependencies": { - "cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", - "requires": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - } - } } }, "@npmcli/move-file": { @@ -12102,27 +11977,27 @@ "integrity": "sha1-y5T662HIaWRR2zZTThQi+U8K7og=" }, "cacache": { - "version": "15.3.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", - "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", + "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", "requires": { "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": 
"^1.0.1", + "@npmcli/move-file": "^1.1.2", "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", + "fs-minipass": "^2.1.0", "glob": "^7.1.4", "infer-owner": "^1.0.4", "lru-cache": "^6.0.0", "minipass": "^3.1.1", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.2", - "mkdirp": "^1.0.3", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", "p-map": "^4.0.0", "promise-inflight": "^1.0.1", "rimraf": "^3.0.2", "ssri": "^8.0.1", - "tar": "^6.0.2", + "tar": "^6.1.11", "unique-filename": "^1.1.1" } }, @@ -14864,41 +14739,6 @@ "ssri": "^8.0.1" }, "dependencies": { - "cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", - "requires": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - } - } - }, "lru-cache": { "version": "7.5.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", @@ -15594,33 +15434,6 @@ "rimraf": "^3.0.2", "ssri": "^8.0.1", "tar": "^6.1.11" - }, - "dependencies": { - "cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", - "requires": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.1.2", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.11", - "unique-filename": "^1.1.1" - } - } } }, "parent-module": { diff --git a/package.json b/package.json index aee3b139aadbc..8cf2d111d6397 100644 --- a/package.json +++ b/package.json @@ -65,7 +65,7 @@ "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "archy": "~1.0.0", - "cacache": "^15.3.0", + "cacache": "^16.0.0", "chalk": "^4.1.2", "chownr": "^2.0.0", "cli-columns": "^4.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index bdc92326e02d8..e1b3c2e9ba624 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -13,7 +13,7 @@ "@npmcli/package-json": "^1.0.1", "@npmcli/run-script": "^3.0.0", "bin-links": "^3.0.0", - "cacache": "^15.0.3", + "cacache": "^16.0.0", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", "json-stringify-nice": "^1.1.4", From 91b826e7fb10d88077454845445fdbd4b9dca585 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 15 Mar 2022 12:44:55 -0700 Subject: [PATCH 05/11] deps: init-package-json@3.0.1 --- 
.../node_modules/hosted-git-info/LICENSE | 13 + .../hosted-git-info/lib/git-host-info.js | 185 ++++++ .../hosted-git-info/lib/git-host.js | 110 ++++ .../node_modules/hosted-git-info/lib/index.js | 244 +++++++ .../node_modules/hosted-git-info/package.json | 56 ++ .../node_modules/lru-cache/LICENSE | 15 + .../node_modules/lru-cache/index.js | 615 ++++++++++++++++++ .../node_modules/lru-cache/package.json | 43 ++ .../normalize-package-data/LICENSE | 15 + .../lib/extract_description.js | 22 + .../normalize-package-data/lib/fixer.js | 475 ++++++++++++++ .../lib/make_warning.js | 22 + .../normalize-package-data/lib/normalize.js | 48 ++ .../normalize-package-data/lib/safe_format.js | 11 + .../normalize-package-data/lib/typos.json | 25 + .../lib/warning_messages.json | 30 + .../normalize-package-data/package.json | 52 ++ .../node_modules/read-package-json/LICENSE | 15 + .../read-package-json/lib/read-json.js | 605 +++++++++++++++++ .../read-package-json/package.json | 55 ++ node_modules/init-package-json/package.json | 10 +- package-lock.json | 108 ++- package.json | 2 +- 23 files changed, 2760 insertions(+), 16 deletions(-) create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/LICENSE create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host-info.js create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host.js create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/package.json create mode 100644 node_modules/init-package-json/node_modules/lru-cache/LICENSE create mode 100644 node_modules/init-package-json/node_modules/lru-cache/index.js create mode 100644 node_modules/init-package-json/node_modules/lru-cache/package.json create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/LICENSE create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/package.json create mode 100644 node_modules/init-package-json/node_modules/read-package-json/LICENSE create mode 100644 node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js create mode 100644 node_modules/init-package-json/node_modules/read-package-json/package.json diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE b/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE new file mode 100644 index 0000000000000..45055763dc838 --- /dev/null +++ b/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the 
above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host-info.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host-info.js new file mode 100644 index 0000000000000..9a9720fa3c339 --- /dev/null +++ b/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host-info.js @@ -0,0 +1,185 @@ +/* eslint-disable max-len */ +'use strict' +const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' +const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : '' + +const defaults = { + sshtemplate: ({ domain, user, project, committish }) => `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'master')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/raw/${maybeEncode(committish) || 'master'}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) => `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment, +} + +const gitHosts = {} +gitHosts.github = Object.assign({}, defaults, { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. 
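+  // (i.e. 'git:' and 'http:', which transfer data in cleartext)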
+ protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + filetemplate: ({ auth, user, project, committish, path }) => `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish) || 'master'}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish } + }, +}) + +gitHosts.bitbucket = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/get/${maybeEncode(committish) || 'master'}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +}) + +gitHosts.gitlab = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish) || 'master'}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +}) + +gitHosts.gist = Object.assign({}, defaults, { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + sshtemplate: ({ domain, project, committish }) => `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, project, committish, path, hashformat }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => 
`https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + }, +}) + +gitHosts.sourcehut = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'https:'], + domain: 'git.sr.ht', + treepath: 'tree', + browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'main')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'main'}/${path}`, + httpstemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'main'}.tar.gz`, + bugstemplate: ({ domain, user, project }) => `https://todo.sr.ht/${user}/${project}`, + docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + + // tarball url + if (['archive'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +}) + +const names = Object.keys(gitHosts) +gitHosts.byShortcut = {} +gitHosts.byDomain = {} +for (const name of names) { + gitHosts.byShortcut[`${name}:`] = name + gitHosts.byDomain[gitHosts[name].domain] = name +} + +function formatHashFragment (fragment) { + return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') +} + +module.exports = gitHosts diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host.js new file mode 100644 index 0000000000000..8a975e92e58bb --- /dev/null +++ b/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host.js @@ -0,0 +1,110 @@ +'use strict' +const gitHosts = require('./git-host-info.js') + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + Object.assign(this, gitHosts[type]) + this.type = type + this.user = user + this.auth = auth + this.project = project + this.committish = committish + this.default = 
defaultRepresentation + this.opts = opts + } + + hash () { + return this.committish ? `#${this.committish}` : '' + } + + ssh (opts) { + return this._fill(this.sshtemplate, opts) + } + + _fill (template, opts) { + if (typeof template === 'function') { + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } + + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result + } + + return null + } + + sshurl (opts) { + return this._fill(this.sshurltemplate, opts) + } + + browse (path, fragment, opts) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this._fill(this.browsetemplate, path) + } + + if (typeof fragment !== 'string') { + opts = fragment + fragment = null + } + return this._fill(this.browsefiletemplate, { ...opts, fragment, path }) + } + + docs (opts) { + return this._fill(this.docstemplate, opts) + } + + bugs (opts) { + return this._fill(this.bugstemplate, opts) + } + + https (opts) { + return this._fill(this.httpstemplate, opts) + } + + git (opts) { + return this._fill(this.gittemplate, opts) + } + + shortcut (opts) { + return this._fill(this.shortcuttemplate, opts) + } + + path (opts) { + return this._fill(this.pathtemplate, opts) + } + + tarball (opts) { + return this._fill(this.tarballtemplate, { ...opts, noCommittish: false }) + } + + file (path, opts) { + return this._fill(this.filetemplate, { ...opts, path }) + } + + getDefaultRepresentation () { + return this.default + } + + toString (opts) { + if (this.default && typeof this[this.default] === 'function') { + return this[this.default](opts) + } + + return this.sshurl(opts) + } +} +module.exports = GitHost diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js new file mode 100644 index 0000000000000..8bce6b3c28d51 --- /dev/null +++ b/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js @@ -0,0 +1,244 @@ +'use strict' +const url = require('url') +const gitHosts = require('./git-host-info.js') +const GitHost = module.exports = require('./git-host.js') +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) + +const protocolToRepresentationMap = { + 'git+ssh:': 'sshurl', + 'git+https:': 'https', + 'ssh:': 'sshurl', + 'git:': 'git', +} + +function protocolToRepresentation (protocol) { + return protocolToRepresentationMap[protocol] || protocol.slice(0, -1) +} + +const authProtocols = { + 'git:': true, + 'https:': true, + 'git+https:': true, + 'http:': true, + 'git+http:': true, +} + +const knownProtocols = Object.keys(gitHosts.byShortcut) + .concat(['http:', 'https:', 'git:', 'git+ssh:', 'git+https:', 'ssh:']) + +module.exports.fromUrl = function (giturl, opts) { + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) + + if (!cache.has(key)) { + cache.set(key, fromUrl(giturl, opts)) + } + + return cache.get(key) +} + +function fromUrl (giturl, opts) { + if (!giturl) { + return + } + + const url = isGitHubShorthand(giturl) ? 
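+    // a shorthand like "npm/cli" is parsed as the github: protocol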
'github:' + giturl : correctProtocol(giturl) + const parsed = parseGitUrl(url) + if (!parsed) { + return parsed + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = + gitHosts.byDomain[parsed.hostname.startsWith('www.') ? + parsed.hostname.slice(4) : + parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (authProtocols[parsed.protocol] && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) + } + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null + } + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return + } + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocolToRepresentation(parsed.protocol) + } + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } + + return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) +} + +// accepts input like git:github.com:user/repo and inserts the // after the first : +const correctProtocol = (arg) => { + const firstColon = arg.indexOf(':') + const proto = arg.slice(0, firstColon + 1) + if (knownProtocols.includes(proto)) { + return arg + } + + const firstAt = arg.indexOf('@') + if (firstAt > -1) { + if (firstAt > firstColon) { + return `git+ssh://${arg}` + } else { + return arg + } + } + + const doubleSlash = arg.indexOf('//') + if (doubleSlash === firstColon + 1) { + return arg + } + + return arg.slice(0, firstColon + 1) + '//' + arg.slice(firstColon + 1) +} + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . 
because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+// attempt to correct an scp style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  const firstAt = giturl.indexOf('@')
+  const lastHash = giturl.lastIndexOf('#')
+  let firstColon = giturl.indexOf(':')
+  let lastColon = giturl.lastIndexOf(':', lastHash > -1 ? lastHash : Infinity)
+
+  let corrected
+  if (lastColon > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    corrected = giturl.slice(0, lastColon) + '/' + giturl.slice(lastColon + 1)
+    // and we find our new : positions
+    firstColon = corrected.indexOf(':')
+    lastColon = corrected.lastIndexOf(':')
+  }
+
+  if (firstColon === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol (to the original input, since
+    // corrected is still unset on this path)
+    corrected = `git+ssh://${giturl}`
+  }
+
+  return corrected
+}
+
+// try to parse the url as its given to us, if that throws
+// then we try to clean the url and parse that result instead
+// THIS FUNCTION SHOULD NEVER THROW
+const parseGitUrl = (giturl) => {
+  let result
+  try {
+    result = new url.URL(giturl)
+  } catch (err) {}
+
+  if (result) {
+    return result
+  }
+
+  const correctedUrl = correctUrl(giturl)
+  try {
+    result = new url.URL(correctedUrl)
+  } catch (err) {}
+
+  return result
+}
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/package.json b/node_modules/init-package-json/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..0153b0852cbf4
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/hosted-git-info/package.json
@@ -0,0 +1,56 @@
+{
+  "name": "hosted-git-info",
+  "version": "5.0.0",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+
"type": "git", + "url": "git+https://github.com/npm/hosted-git-info.git" + }, + "keywords": [ + "git", + "github", + "bitbucket", + "gitlab" + ], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/hosted-git-info/issues" + }, + "homepage": "https://github.com/npm/hosted-git-info", + "scripts": { + "posttest": "npm run lint", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "snap": "tap", + "test": "tap", + "test:coverage": "tap --coverage-report=html", + "lint": "eslint '**/*.js'", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "lintfix": "npm run lint -- --fix" + }, + "dependencies": { + "lru-cache": "^7.5.1" + }, + "devDependencies": { + "@npmcli/template-oss": "^2.9.2", + "tap": "^15.1.6" + }, + "files": [ + "bin", + "lib" + ], + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "tap": { + "color": 1, + "coverage": true + }, + "templateOSS": { + "version": "2.9.2" + } +} diff --git a/node_modules/init-package-json/node_modules/lru-cache/LICENSE b/node_modules/init-package-json/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..9b58a3e03d1df --- /dev/null +++ b/node_modules/init-package-json/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.js b/node_modules/init-package-json/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..e37f51616452e --- /dev/null +++ b/node_modules/init-package-json/node_modules/lru-cache/index.js @@ -0,0 +1,615 @@ +const perf = typeof performance === 'object' && performance && + typeof performance.now === 'function' ? 
performance : Date + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} +const shouldWarn = (code) => typeof process === 'object' && + process && + !(process.noDeprecation || warned.has(code)) +const warn = (code, what, instead, fn) => { + warned.add(code) + process.emitWarning(`The ${what} is deprecated. Please use ${instead} instead.`, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => !isPosInt(max) ? null +: max <= Math.pow(2, 8) ? Uint8Array +: max <= Math.pow(2, 16) ? Uint16Array +: max <= Math.pow(2, 32) ? Uint32Array +: max <= Number.MAX_SAFE_INTEGER ? ZeroArray +: null + +class ZeroArray extends Array { + constructor (size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor (max) { + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push (n) { + this.heap[this.length++] = n + } + pop () { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor (options = {}) { + const { + max, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize, + sizeCalculation, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { + length, + maxAge, + stale, + } = options instanceof LRUCache ? 
{} : options + + if (!isPosInt(max)) { + throw new TypeError('max option must be an integer') + } + + const UintArray = getUintArray(max) + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize || 0 + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize') + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculating set to non-function') + } + } + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + + if (this.maxSize) { + if (!isPosInt(this.maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified') + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.updateAgeOnGet = !!updateAgeOnGet + this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified') + } + this.initializeTTLTracking() + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + initializeTTLTracking () { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + this.setItemTTL = (index, ttl) => { + this.starts[index] = ttl !== 0 ? perf.now() : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + this.updateItemAge = (index) => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout(() => cachedNow = 0, this.ttlResolution) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + this.isStale = (index) => { + return this.ttls[index] !== 0 && this.starts[index] !== 0 && + ((cachedNow || getNow()) - this.starts[index] > this.ttls[index]) + } + } + updateItemAge (index) {} + setItemTTL (index, ttl) {} + isStale (index) { return false } + + initializeSizeTracking () { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => this.calculatedSize -= this.sizes[index] + this.addItemSize = (index, v, k, size, sizeCalculation) => { + const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0) + this.sizes[index] = isPosInt(s) ? 
s : 0 + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict() + } + this.calculatedSize += this.sizes[index] + } + this.delete = k => { + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + this.calculatedSize -= this.sizes[index] + } + } + return LRUCache.prototype.delete.call(this, k) + } + } + removeItemSize (index) {} + addItemSize (index, v, k, size, sizeCalculation) {} + + *indexes ({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail, j; true; ) { + if (!this.isValidIndex(i)) { + break + } + j = i === this.head + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes ({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head, j; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + // either the tail now, or WAS the tail, and deleted + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex (index) { + return this.keyMap.get(this.keyList[index]) === index + } + + *entries () { + for (const i of this.indexes()) { + yield [this.keyList[i], this.valList[i]] + } + } + *rentries () { + for (const i of this.rindexes()) { + yield [this.keyList[i], this.valList[i]] + } + } + + *keys () { + for (const i of this.indexes()) { + yield this.keyList[i] + } + } + *rkeys () { + for (const i of this.rindexes()) { + yield this.keyList[i] + } + } + + *values () { + for (const i of this.indexes()) { + yield this.valList[i] + } + } + *rvalues () { + for (const i of this.rindexes()) { + yield this.valList[i] + } + } + + [Symbol.iterator] () { + return this.entries() + } + + find (fn, getOptions = {}) { + for (const i of this.indexes()) { + if (fn(this.valList[i], this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach (fn, thisp = this) { + for (const i of this.indexes()) { + fn.call(thisp, this.valList[i], this.keyList[i], this) + } + } + + rforEach (fn, thisp = this) { + for (const i of this.rindexes()) { + fn.call(thisp, this.valList[i], this.keyList[i], this) + } + } + + get prune () { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale () { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump () { + const arr = [] + for (const i of this.indexes()) { + const key = this.keyList[i] + const value = this.valList[i] + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load (arr) { + this.clear() + for (const [key, entry] of arr) { + this.set(key, entry.value, entry) + } + } + + dispose (v, k, reason) {} + + set (k, v, { + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + } = {}) { + let index = this.size === 0 ? 
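+      // an empty cache can skip the lookup; this is always an insertion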
undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size ++ + this.addItemSize(index, v, k, size, sizeCalculation) + noUpdateTTL = false + } else { + // update + const oldVal = this.valList[index] + if (v !== oldVal) { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, v, k, size, sizeCalculation) + } + this.moveToTail(index) + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl) + } + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex () { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max) { + return this.evict() + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop () { + if (this.size) { + const val = this.valList[this.head] + this.evict() + return val + } + } + + evict () { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + this.removeItemSize(head) + this.head = this.next[head] + this.keyMap.delete(k) + this.size -- + return head + } + + has (k) { + return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) + } + + // like get(), but without any LRU updating or TTL expiration + peek (k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + return this.valList[index] + } + } + + get (k, { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (this.isStale(index)) { + const value = allowStale ? 
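+        // stale hit: hand back the old value only if allowed, but evict it either way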
this.valList[index] : undefined + this.delete(k) + return value + } else { + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return this.valList[index] + } + } + } + + connect (p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail (index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del () { + deprecatedMethod('del', 'delete') + return this.delete + } + delete (k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + this.dispose(this.valList[index], k, 'delete') + if (this.disposeAfter) { + this.disposed.push([this.valList[index], k, 'delete']) + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size -- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear () { + if (this.dispose !== LRUCache.prototype.dispose) { + for (const index of this.rindexes({ allowStale: true })) { + this.dispose(this.valList[index], this.keyList[index], 'delete') + } + } + if (this.disposeAfter) { + for (const index of this.rindexes({ allowStale: true })) { + this.disposed.push([this.valList[index], this.keyList[index], 'delete']) + } + } + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + get reset () { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length () { + deprecatedProperty('length', 'size') + return this.size + } +} + +module.exports = LRUCache diff --git a/node_modules/init-package-json/node_modules/lru-cache/package.json b/node_modules/init-package-json/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..a62f74c2b648a --- /dev/null +++ b/node_modules/init-package-json/node_modules/lru-cache/package.json @@ -0,0 +1,43 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.5.1", + "author": "Isaac Z. 
Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "build": "", + "test": "tap", + "snap": "tap", + "size": "size-limit", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "benchmark": "^2.1.4", + "size-limit": "^7.0.8", + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "index.js" + ], + "engines": { + "node": ">=12" + }, + "tap": { + "coverage-map": "map.js" + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE b/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE new file mode 100644 index 0000000000000..19d1364a8ac08 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE @@ -0,0 +1,15 @@ +This package contains code originally written by Isaac Z. Schlueter. +Used with permission. + +Copyright (c) Meryn Stol ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js new file mode 100644 index 0000000000000..bf9896812e5f5 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js @@ -0,0 +1,22 @@ +module.exports = extractDescription + +// Extracts description from contents of a readme file in markdown format +function extractDescription (d) { + if (!d) { + return + } + if (d === 'ERROR: No README data found!') { + return + } + // the first block of text before the first heading + // that isn't the first line heading + d = d.trim().split('\n') + for (var s = 0; d[s] && d[s].trim().match(/^(#|$)/); s++) { + ; + } + var l = d.length + for (var e = s + 1; e < l && d[e].trim(); e++) { + ; + } + return d.slice(s, e).join(' ').trim() +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js new file mode 100644 index 0000000000000..0846f2c045a6e --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js @@ -0,0 +1,475 @@ +var isValidSemver = require('semver/functions/valid') +var cleanSemver = require('semver/functions/clean') +var validateLicense = require('validate-npm-package-license') +var hostedGitInfo = require('hosted-git-info') +var isBuiltinModule = require('is-core-module') +var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'] +var extractDescription = require('./extract_description') +var url = require('url') +var typos = require('./typos.json') + +var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) + +module.exports = { + // default warning function + warn: function () {}, + + fixRepositoryField: function (data) { + if (data.repositories) { + this.warn('repositories') + data.repository = data.repositories[0] + } + if (!data.repository) { + return this.warn('missingRepository') + } + if (typeof data.repository === 'string') { + data.repository = { + type: 'git', + url: data.repository, + } + } + var r = data.repository.url || '' + if (r) { + var hosted = hostedGitInfo.fromUrl(r) + if (hosted) { + r = data.repository.url + = hosted.getDefaultRepresentation() === 'shortcut' ? 
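+          // expand shortcuts like github:user/repo to a full git+https url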
hosted.https() : hosted.toString() + } + } + + if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) { + this.warn('brokenGitUrl', r) + } + }, + + fixTypos: function (data) { + Object.keys(typos.topLevel).forEach(function (d) { + if (Object.prototype.hasOwnProperty.call(data, d)) { + this.warn('typo', d, typos.topLevel[d]) + } + }, this) + }, + + fixScriptsField: function (data) { + if (!data.scripts) { + return + } + if (typeof data.scripts !== 'object') { + this.warn('nonObjectScripts') + delete data.scripts + return + } + Object.keys(data.scripts).forEach(function (k) { + if (typeof data.scripts[k] !== 'string') { + this.warn('nonStringScript') + delete data.scripts[k] + } else if (typos.script[k] && !data.scripts[typos.script[k]]) { + this.warn('typo', k, typos.script[k], 'scripts') + } + }, this) + }, + + fixFilesField: function (data) { + var files = data.files + if (files && !Array.isArray(files)) { + this.warn('nonArrayFiles') + delete data.files + } else if (data.files) { + data.files = data.files.filter(function (file) { + if (!file || typeof file !== 'string') { + this.warn('invalidFilename', file) + return false + } else { + return true + } + }, this) + } + }, + + fixBinField: function (data) { + if (!data.bin) { + return + } + if (typeof data.bin === 'string') { + var b = {} + var match + if (match = data.name.match(/^@[^/]+[/](.*)$/)) { + b[match[1]] = data.bin + } else { + b[data.name] = data.bin + } + data.bin = b + } + }, + + fixManField: function (data) { + if (!data.man) { + return + } + if (typeof data.man === 'string') { + data.man = [data.man] + } + }, + fixBundleDependenciesField: function (data) { + var bdd = 'bundledDependencies' + var bd = 'bundleDependencies' + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + this.warn('nonArrayBundleDependencies') + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function (bd) { + if (!bd || typeof bd !== 'string') { + this.warn('nonStringBundleDependency', bd) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (!Object.prototype.hasOwnProperty.call(data.dependencies, bd)) { + this.warn('nonDependencyBundleDependency', bd) + data.dependencies[bd] = '*' + } + return true + } + }, this) + } + }, + + fixDependencies: function (data, strict) { + objectifyDeps(data, this.warn) + addOptionalDepsToDeps(data, this.warn) + this.fixBundleDependenciesField(data) + + ;['dependencies', 'devDependencies'].forEach(function (deps) { + if (!(deps in data)) { + return + } + if (!data[deps] || typeof data[deps] !== 'object') { + this.warn('nonObjectDependencies', deps) + delete data[deps] + return + } + Object.keys(data[deps]).forEach(function (d) { + var r = data[deps][d] + if (typeof r !== 'string') { + this.warn('nonStringDependency', d, JSON.stringify(r)) + delete data[deps][d] + } + var hosted = hostedGitInfo.fromUrl(data[deps][d]) + if (hosted) { + data[deps][d] = hosted.toString() + } + }, this) + }, this) + }, + + fixModulesField: function (data) { + if (data.modules) { + this.warn('deprecatedModules') + delete data.modules + } + }, + + fixKeywordsField: function (data) { + if (typeof data.keywords === 'string') { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords + this.warn('nonArrayKeywords') + } else if (data.keywords) { + data.keywords = data.keywords.filter(function (kw) { + if (typeof kw !== 'string' || !kw) { + 
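+          // drop empty and non-string keywords, with a warning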
this.warn('nonStringKeyword') + return false + } else { + return true + } + }, this) + } + }, + + fixVersionField: function (data, strict) { + // allow "loose" semver 1.0 versions in non-strict mode + // enforce strict semver 2.0 compliance in strict mode + var loose = !strict + if (!data.version) { + data.version = '' + return true + } + if (!isValidSemver(data.version, loose)) { + throw new Error('Invalid version: "' + data.version + '"') + } + data.version = cleanSemver(data.version, loose) + return true + }, + + fixPeople: function (data) { + modifyPeople(data, unParsePerson) + modifyPeople(data, parsePerson) + }, + + fixNameField: function (data, options) { + if (typeof options === 'boolean') { + options = { strict: options } + } else if (typeof options === 'undefined') { + options = {} + } + var strict = options.strict + if (!data.name && !strict) { + data.name = '' + return + } + if (typeof data.name !== 'string') { + throw new Error('name field must be a string.') + } + if (!strict) { + data.name = data.name.trim() + } + ensureValidName(data.name, strict, options.allowLegacyCase) + if (isBuiltinModule(data.name)) { + this.warn('conflictingName', data.name) + } + }, + + fixDescriptionField: function (data) { + if (data.description && typeof data.description !== 'string') { + this.warn('nonStringDescription') + delete data.description + } + if (data.readme && !data.description) { + data.description = extractDescription(data.readme) + } + if (data.description === undefined) { + delete data.description + } + if (!data.description) { + this.warn('missingDescription') + } + }, + + fixReadmeField: function (data) { + if (!data.readme) { + this.warn('missingReadme') + data.readme = 'ERROR: No README data found!' + } + }, + + fixBugsField: function (data) { + if (!data.bugs && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.bugs()) { + data.bugs = { url: hosted.bugs() } + } + } else if (data.bugs) { + if (typeof data.bugs === 'string') { + if (isEmail(data.bugs)) { + data.bugs = { email: data.bugs } + /* eslint-disable-next-line node/no-deprecated-api */ + } else if (url.parse(data.bugs).protocol) { + data.bugs = { url: data.bugs } + } else { + this.warn('nonEmailUrlBugsString') + } + } else { + bugsTypos(data.bugs, this.warn) + var oldBugs = data.bugs + data.bugs = {} + if (oldBugs.url) { + /* eslint-disable-next-line node/no-deprecated-api */ + if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { + data.bugs.url = oldBugs.url + } else { + this.warn('nonUrlBugsUrlField') + } + } + if (oldBugs.email) { + if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { + data.bugs.email = oldBugs.email + } else { + this.warn('nonEmailBugsEmailField') + } + } + } + if (!data.bugs.email && !data.bugs.url) { + delete data.bugs + this.warn('emptyNormalizedBugs') + } + } + }, + + fixHomepageField: function (data) { + if (!data.homepage && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.docs()) { + data.homepage = hosted.docs() + } + } + if (!data.homepage) { + return + } + + if (typeof data.homepage !== 'string') { + this.warn('nonUrlHomepage') + return delete data.homepage + } + /* eslint-disable-next-line node/no-deprecated-api */ + if (!url.parse(data.homepage).protocol) { + data.homepage = 'http://' + data.homepage + } + }, + + fixLicenseField: function (data) { + const license = data.license || data.licence + if 
(!license) { + return this.warn('missingLicense') + } + if ( + typeof (license) !== 'string' || + license.length < 1 || + license.trim() === '' + ) { + return this.warn('invalidLicense') + } + if (!validateLicense(license).validForNewPackages) { + return this.warn('invalidLicense') + } + }, +} + +function isValidScopedPackageName (spec) { + if (spec.charAt(0) !== '@') { + return false + } + + var rest = spec.slice(1).split('/') + if (rest.length !== 2) { + return false + } + + return rest[0] && rest[1] && + rest[0] === encodeURIComponent(rest[0]) && + rest[1] === encodeURIComponent(rest[1]) +} + +function isCorrectlyEncodedName (spec) { + return !spec.match(/[/@\s+%:]/) && + spec === encodeURIComponent(spec) +} + +function ensureValidName (name, strict, allowLegacyCase) { + if (name.charAt(0) === '.' || + !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) || + (strict && (!allowLegacyCase) && name !== name.toLowerCase()) || + name.toLowerCase() === 'node_modules' || + name.toLowerCase() === 'favicon.ico') { + throw new Error('Invalid name: ' + JSON.stringify(name)) + } +} + +function modifyPeople (data, fn) { + if (data.author) { + data.author = fn(data.author) + }['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } + data[set] = data[set].map(fn) + }) + return data +} + +function unParsePerson (person) { + if (typeof person === 'string') { + return person + } + var name = person.name || '' + var u = person.url || person.web + var url = u ? (' (' + u + ')') : '' + var e = person.email || person.mail + var email = e ? (' <' + e + '>') : '' + return name + email + url +} + +function parsePerson (person) { + if (typeof person !== 'string') { + return person + } + var name = person.match(/^([^(<]+)/) + var url = person.match(/\(([^()]+)\)/) + var email = person.match(/<([^<>]+)>/) + var obj = {} + if (name && name[0].trim()) { + obj.name = name[0].trim() + } + if (email) { + obj.email = email[1] + } + if (url) { + obj.url = url[1] + } + return obj +} + +function addOptionalDepsToDeps (data, warn) { + var o = data.optionalDependencies + if (!o) { + return + } + var d = data.dependencies || {} + Object.keys(o).forEach(function (k) { + d[k] = o[k] + }) + data.dependencies = d +} + +function depObjectify (deps, type, warn) { + if (!deps) { + return {} + } + if (typeof deps === 'string') { + deps = deps.trim().split(/[\n\r\s\t ,]+/) + } + if (!Array.isArray(deps)) { + return deps + } + warn('deprecatedArrayDependencies', type) + var o = {} + deps.filter(function (d) { + return typeof d === 'string' + }).forEach(function (d) { + d = d.trim().split(/(:?[@\s><=])/) + var dn = d.shift() + var dv = d.join('') + dv = dv.trim() + dv = dv.replace(/^@/, '') + o[dn] = dv + }) + return o +} + +function objectifyDeps (data, warn) { + depTypes.forEach(function (type) { + if (!data[type]) { + return + } + data[type] = depObjectify(data[type], type, warn) + }) +} + +function bugsTypos (bugs, warn) { + if (!bugs) { + return + } + Object.keys(bugs).forEach(function (k) { + if (typos.bugs[k]) { + warn('typo', k, typos.bugs[k], 'bugs') + bugs[typos.bugs[k]] = bugs[k] + delete bugs[k] + } + }) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js new file mode 100644 index 0000000000000..3be9c86539952 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js @@ -0,0 
+1,22 @@ +var util = require('util') +var messages = require('./warning_messages.json') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + var warningName = args.shift() + if (warningName === 'typo') { + return makeTypoWarning.apply(null, args) + } else { + var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'" + args.unshift(msgTemplate) + return util.format.apply(null, args) + } +} + +function makeTypoWarning (providedName, probableName, field) { + if (field) { + providedName = field + "['" + providedName + "']" + probableName = field + "['" + probableName + "']" + } + return util.format(messages.typo, providedName, probableName) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js new file mode 100644 index 0000000000000..bf71d2c1e2235 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js @@ -0,0 +1,48 @@ +module.exports = normalize + +var fixer = require('./fixer') +normalize.fixer = fixer + +var makeWarning = require('./make_warning') + +var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts', + 'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license'] +var otherThingsToFix = ['dependencies', 'people', 'typos'] + +var thingsToFix = fieldsToFix.map(function (fieldName) { + return ucFirst(fieldName) + 'Field' +}) +// two ways to do this in CoffeeScript on only one line, sub-70 chars: +// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field" +// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix) +thingsToFix = thingsToFix.concat(otherThingsToFix) + +function normalize (data, warn, strict) { + if (warn === true) { + warn = null + strict = true + } + if (!strict) { + strict = false + } + if (!warn || data.private) { + warn = function (msg) { /* noop */ } + } + + if (data.scripts && + data.scripts.install === 'node-gyp rebuild' && + !data.scripts.preinstall) { + data.gypfile = true + } + fixer.warn = function () { + warn(makeWarning.apply(null, arguments)) + } + thingsToFix.forEach(function (thingName) { + fixer['fix' + ucFirst(thingName)](data, strict) + }) + data._id = data.name + '@' + data.version +} + +function ucFirst (string) { + return string.charAt(0).toUpperCase() + string.slice(1) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js new file mode 100644 index 0000000000000..5fc888e5450cd --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js @@ -0,0 +1,11 @@ +var util = require('util') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.forEach(function (arg) { + if (!arg) { + throw new TypeError('Bad arguments.') + } + }) + return util.format.apply(null, arguments) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json new file mode 100644 index 0000000000000..7f9dd283b30ff --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json @@ -0,0 +1,25 @@ +{ + "topLevel": { + "dependancies": "dependencies" + ,"dependecies": "dependencies" + ,"depdenencies": 
"dependencies" + ,"devEependencies": "devDependencies" + ,"depends": "dependencies" + ,"dev-dependencies": "devDependencies" + ,"devDependences": "devDependencies" + ,"devDepenencies": "devDependencies" + ,"devdependencies": "devDependencies" + ,"repostitory": "repository" + ,"repo": "repository" + ,"prefereGlobal": "preferGlobal" + ,"hompage": "homepage" + ,"hampage": "homepage" + ,"autohr": "author" + ,"autor": "author" + ,"contributers": "contributors" + ,"publicationConfig": "publishConfig" + ,"script": "scripts" + }, + "bugs": { "web": "url", "name": "url" }, + "script": { "server": "start", "tests": "test" } +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json new file mode 100644 index 0000000000000..4890f506ed965 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json @@ -0,0 +1,30 @@ +{ + "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field" + ,"missingRepository": "No repository field." + ,"brokenGitUrl": "Probably broken git url: %s" + ,"nonObjectScripts": "scripts must be an object" + ,"nonStringScript": "script values must be string commands" + ,"nonArrayFiles": "Invalid 'files' member" + ,"invalidFilename": "Invalid filename in 'files' list: %s" + ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names" + ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s" + ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s" + ,"nonObjectDependencies": "%s field must be an object" + ,"nonStringDependency": "Invalid dependency: %s %s" + ,"deprecatedArrayDependencies": "specifying %s as array is deprecated" + ,"deprecatedModules": "modules field is deprecated" + ,"nonArrayKeywords": "keywords should be an array of strings" + ,"nonStringKeyword": "keywords should be an array of strings" + ,"conflictingName": "%s is also the name of a node core module." + ,"nonStringDescription": "'description' field should be a string" + ,"missingDescription": "No description" + ,"missingReadme": "No README data" + ,"missingLicense": "No license field." + ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}" + ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted." + ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted." + ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted." + ,"nonUrlHomepage": "homepage field must be a string url. Deleted." + ,"invalidLicense": "license should be a valid SPDX license expression" + ,"typo": "%s should probably be %s." 
+} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/package.json b/node_modules/init-package-json/node_modules/normalize-package-data/package.json new file mode 100644 index 0000000000000..a6f1244eb5a25 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/package.json @@ -0,0 +1,52 @@ +{ + "name": "normalize-package-data", + "version": "4.0.0", + "author": "GitHub Inc.", + "description": "Normalizes data that can be found in package.json files.", + "license": "BSD-2-Clause", + "repository": { + "type": "git", + "url": "git://github.com/npm/normalize-package-data.git" + }, + "main": "lib/normalize.js", + "scripts": { + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint '**/*.js'", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "snap": "tap" + }, + "dependencies": { + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "devDependencies": { + "@npmcli/template-oss": "^2.9.2", + "tap": "^15.0.9" + }, + "files": [ + "bin", + "lib" + ], + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "version": "2.9.2" + }, + "tap": { + "branches": 86, + "functions": 92, + "lines": 86, + "statements": 86 + } +} diff --git a/node_modules/init-package-json/node_modules/read-package-json/LICENSE b/node_modules/init-package-json/node_modules/read-package-json/LICENSE new file mode 100644 index 0000000000000..052085c436514 --- /dev/null +++ b/node_modules/init-package-json/node_modules/read-package-json/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js b/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js new file mode 100644 index 0000000000000..d0ee9af1ae750 --- /dev/null +++ b/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js @@ -0,0 +1,605 @@ +var fs = require('fs') + +var path = require('path') + +var glob = require('glob') +var normalizeData = require('normalize-package-data') +var safeJSON = require('json-parse-even-better-errors') +var util = require('util') +var normalizePackageBin = require('npm-normalize-package-bin') + +module.exports = readJson + +// put more stuff on here to customize. 
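+// each entry has the signature (file, data, cb); readJson.extras runs them +// all in parallel, and each may augment `data` before it is normalized.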
+readJson.extraSet = [ + bundleDependencies, + gypfile, + serverjs, + scriptpath, + authors, + readme, + mans, + bins, + githead, + fillTypes, +] + +var typoWarned = {} +var cache = {} + +function readJson (file, log_, strict_, cb_) { + var log, strict, cb + for (var i = 1; i < arguments.length - 1; i++) { + if (typeof arguments[i] === 'boolean') { + strict = arguments[i] + } else if (typeof arguments[i] === 'function') { + log = arguments[i] + } + } + + if (!log) { + log = function () {} + } + cb = arguments[arguments.length - 1] + + readJson_(file, log, strict, cb) +} + +function readJson_ (file, log, strict, cb) { + fs.readFile(file, 'utf8', function (er, d) { + parseJson(file, er, d, log, strict, cb) + }) +} + +function stripBOM (content) { + // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) + // because the buffer-to-string conversion in `fs.readFile()` + // translates it to FEFF, the UTF-16 BOM. + if (content.charCodeAt(0) === 0xFEFF) { + content = content.slice(1) + } + return content +} + +// deep copy of parsed data, so entries handed out from the cache cannot +// be mutated by callers +function jsonClone (obj) { + if (obj == null) { + return obj + } else if (Array.isArray(obj)) { + var newarr = new Array(obj.length) + for (var ii in obj) { + newarr[ii] = jsonClone(obj[ii]) + } + return newarr + } else if (typeof obj === 'object') { + var newobj = {} + for (var kk in obj) { + newobj[kk] = jsonClone(obj[kk]) + } + return newobj + } else { + return obj + } +} + +function parseJson (file, er, d, log, strict, cb) { + if (er && er.code === 'ENOENT') { + return fs.stat(path.dirname(file), function (err, stat) { + if (!err && stat && !stat.isDirectory()) { + // ENOTDIR isn't used on Windows, but npm expects it. + er = Object.create(er) + er.code = 'ENOTDIR' + return cb(er) + } else { + return indexjs(file, er, log, strict, cb) + } + }) + } + if (er) { + return cb(er) + } + + if (cache[d]) { + return cb(null, jsonClone(cache[d])) + } + + var data + + try { + data = safeJSON(stripBOM(d)) + for (var key in data) { + if (/^_/.test(key)) { + delete data[key] + } + } + } catch (er) { + data = parseIndex(d) + if (!data) { + return cb(parseError(er, file)) + } + } + + extrasCached(file, d, data, log, strict, cb) +} + +function extrasCached (file, d, data, log, strict, cb) { + extras(file, data, log, strict, function (err, data) { + if (!err) { + cache[d] = jsonClone(data) + } + cb(err, data) + }) +} + +function indexjs (file, er, log, strict, cb) { + if (path.basename(file) === 'index.js') { + return cb(er) + } + + var index = path.resolve(path.dirname(file), 'index.js') + fs.readFile(index, 'utf8', function (er2, d) { + if (er2) { + return cb(er) + } + + if (cache[d]) { + return cb(null, jsonClone(cache[d])) + } + + var data = parseIndex(d) + if (!data) { + return cb(er) + } + + extrasCached(file, d, data, log, strict, cb) + }) +} + +readJson.extras = extras +function extras (file, data, log_, strict_, cb_) { + var log, strict, cb + for (var i = 2; i < arguments.length - 1; i++) { + if (typeof arguments[i] === 'boolean') { + strict = arguments[i] + } else if (typeof arguments[i] === 'function') { + log = arguments[i] + } + } + + if (!log) { + log = function () {} + } + cb = arguments[i] + + var set = readJson.extraSet + var n = set.length + var errState = null + set.forEach(function (fn) { + fn(file, data, then) + }) + + function then (er) { + if (errState) { + return + } + if (er) { + return cb(errState = er) + } + if (--n > 0) { + return + } + final(file, data, log, strict, cb) + } +} + +function scriptpath (file, data, cb) { + if (!data.scripts) { + return cb(null, data) + } + var k = Object.keys(data.scripts) +
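// scriptpath_ is called with data.scripts bound as `this`: it drops + // non-string entries and strips any leading node_modules/.bin/ prefix +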
k.forEach(scriptpath_, data.scripts) + cb(null, data) +} + +function scriptpath_ (key) { + var s = this[key] + // This is never allowed, and only causes problems + if (typeof s !== 'string') { + return delete this[key] + } + + var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ + if (s.match(spre)) { + this[key] = this[key].replace(spre, '') + } +} + +function gypfile (file, data, cb) { + var dir = path.dirname(file) + var s = data.scripts || {} + if (s.install || s.preinstall) { + return cb(null, data) + } + + glob('*.gyp', { cwd: dir }, function (er, files) { + if (er) { + return cb(er) + } + if (data.gypfile === false) { + return cb(null, data) + } + gypfile_(file, data, files, cb) + }) +} + +function gypfile_ (file, data, files, cb) { + if (!files.length) { + return cb(null, data) + } + var s = data.scripts || {} + s.install = 'node-gyp rebuild' + data.scripts = s + data.gypfile = true + return cb(null, data) +} + +function serverjs (file, data, cb) { + var dir = path.dirname(file) + var s = data.scripts || {} + if (s.start) { + return cb(null, data) + } + glob('server.js', { cwd: dir }, function (er, files) { + if (er) { + return cb(er) + } + serverjs_(file, data, files, cb) + }) +} + +function serverjs_ (file, data, files, cb) { + if (!files.length) { + return cb(null, data) + } + var s = data.scripts || {} + s.start = 'node server.js' + data.scripts = s + return cb(null, data) +} + +function authors (file, data, cb) { + if (data.contributors) { + return cb(null, data) + } + var af = path.resolve(path.dirname(file), 'AUTHORS') + fs.readFile(af, 'utf8', function (er, ad) { + // ignore error. just checking it. + if (er) { + return cb(null, data) + } + authors_(file, data, ad, cb) + }) +} + +function authors_ (file, data, ad, cb) { + ad = ad.split(/\r?\n/g).map(function (line) { + return line.replace(/^\s*#.*$/, '').trim() + }).filter(function (line) { + return line + }) + data.contributors = ad + return cb(null, data) +} + +function readme (file, data, cb) { + if (data.readme) { + return cb(null, data) + } + var dir = path.dirname(file) + var globOpts = { cwd: dir, nocase: true, mark: true } + glob('{README,README.*}', globOpts, function (er, files) { + if (er) { + return cb(er) + } + // don't accept directories. + files = files.filter(function (file) { + return !file.match(/\/$/) + }) + if (!files.length) { + return cb() + } + var fn = preferMarkdownReadme(files) + var rm = path.resolve(dir, fn) + readme_(file, data, rm, cb) + }) +} + +function preferMarkdownReadme (files) { + var fallback = 0 + var re = /\.m?a?r?k?d?o?w?n?$/i + for (var i = 0; i < files.length; i++) { + if (files[i].match(re)) { + return files[i] + } else if (files[i].match(/README$/)) { + fallback = i + } + } + // prefer README.md, followed by README; otherwise, return + // the first filename (which could be README) + return files[fallback] +} + +function readme_ (file, data, rm, cb) { + var rmfn = path.basename(rm) + fs.readFile(rm, 'utf8', function (er, rm) { + // maybe not readable, or something. 
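+ // if so, just finish without attaching readme data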
+ if (er) { + return cb() + } + data.readme = rm + data.readmeFilename = rmfn + return cb(er, data) + }) +} + +function mans (file, data, cb) { + let cwd = data.directories && data.directories.man + if (data.man || !cwd) { + return cb(null, data) + } + const dirname = path.dirname(file) + cwd = path.resolve(path.dirname(file), cwd) + glob('**/*.[0-9]', { cwd }, function (er, mans) { + if (er) { + return cb(er) + } + data.man = mans.map(man => + path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/') + ) + return cb(null, data) + }) +} + +function bins (file, data, cb) { + data = normalizePackageBin(data) + + var m = data.directories && data.directories.bin + if (data.bin || !m) { + return cb(null, data) + } + + m = path.resolve(path.dirname(file), m) + glob('**', { cwd: m }, function (er, bins) { + if (er) { + return cb(er) + } + bins_(file, data, bins, cb) + }) +} + +function bins_ (file, data, bins, cb) { + var m = (data.directories && data.directories.bin) || '.' + data.bin = bins.reduce(function (acc, mf) { + if (mf && mf.charAt(0) !== '.') { + var f = path.basename(mf) + acc[f] = path.join(m, mf) + } + return acc + }, {}) + return cb(null, normalizePackageBin(data)) +} + +function bundleDependencies (file, data, cb) { + var bd = 'bundleDependencies' + var bdd = 'bundledDependencies' + // normalize key name + if (data[bdd] !== undefined) { + if (data[bd] === undefined) { + data[bd] = data[bdd] + } + delete data[bdd] + } + if (data[bd] === false) { + delete data[bd] + } else if (data[bd] === true) { + data[bd] = Object.keys(data.dependencies || {}) + } else if (data[bd] !== undefined && !Array.isArray(data[bd])) { + delete data[bd] + } + return cb(null, data) +} + +function githead (file, data, cb) { + if (data.gitHead) { + return cb(null, data) + } + var dir = path.dirname(file) + var head = path.resolve(dir, '.git/HEAD') + fs.readFile(head, 'utf8', function (er, head) { + if (er) { + var parent = path.dirname(dir) + if (parent === dir) { + return cb(null, data) + } + return githead(dir, data, cb) + } + githead_(data, dir, head, cb) + }) +} + +function githead_ (data, dir, head, cb) { + if (!head.match(/^ref: /)) { + data.gitHead = head.trim() + return cb(null, data) + } + var headRef = head.replace(/^ref: /, '').trim() + var headFile = path.resolve(dir, '.git', headRef) + fs.readFile(headFile, 'utf8', function (er, head) { + if (er || !head) { + var packFile = path.resolve(dir, '.git/packed-refs') + return fs.readFile(packFile, 'utf8', function (er, refs) { + if (er || !refs) { + return cb(null, data) + } + refs = refs.split('\n') + for (var i = 0; i < refs.length; i++) { + var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) + if (match && match[2].trim() === headRef) { + data.gitHead = match[1] + break + } + } + return cb(null, data) + }) + } + head = head.replace(/^ref: /, '').trim() + data.gitHead = head + return cb(null, data) + }) +} + +/** + * Warn if the bin references don't point to anything. This might be better in + * normalize-package-data if it had access to the file path. 
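+ * Each entry in data.bin is resolved and stat()ed relative to the package + * directory; entries with no file behind them produce a warning.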
+ */ +function checkBinReferences_ (file, data, warn, cb) { + if (!(data.bin instanceof Object)) { + return cb() + } + + var keys = Object.keys(data.bin) + var keysLeft = keys.length + if (!keysLeft) { + return cb() + } + + function handleExists (relName, result) { + keysLeft-- + if (!result) { + warn('No bin file found at ' + relName) + } + if (!keysLeft) { + cb() + } + } + + keys.forEach(function (key) { + var dirName = path.dirname(file) + var relName = data.bin[key] + /* istanbul ignore if - impossible, bins have been normalized */ + if (typeof relName !== 'string') { + var msg = 'Bin filename for ' + key + + ' is not a string: ' + util.inspect(relName) + warn(msg) + delete data.bin[key] + handleExists(relName, true) + return + } + var binPath = path.resolve(dirName, relName) + fs.stat(binPath, (err) => handleExists(relName, !err)) + }) +} + +function final (file, data, log, strict, cb) { + var pId = makePackageId(data) + + function warn (msg) { + if (typoWarned[pId]) { + return + } + if (log) { + log('package.json', pId, msg) + } + } + + try { + normalizeData(data, warn, strict) + } catch (error) { + return cb(error) + } + + checkBinReferences_(file, data, warn, function () { + typoWarned[pId] = true + cb(null, data) + }) +} + +function fillTypes (file, data, cb) { + var index = data.main ? data.main : 'index.js' + + if (typeof index !== 'string') { + return cb(new TypeError('The "main" attribute must be of type string.')) + } + + // TODO exports is much more complicated than this in verbose format + // We need to support for instance + + // "exports": { + // ".": [ + // { + // "default": "./lib/npm.js" + // }, + // "./lib/npm.js" + // ], + // "./package.json": "./package.json" + // }, + // as well as conditional exports + + // if (data.exports && typeof data.exports === 'string') { + // index = data.exports + // } + + // if (data.exports && data.exports['.']) { + // index = data.exports['.'] + // if (typeof index !== 'string') { + // } + // } + + var extless = + path.join(path.dirname(index), path.basename(index, path.extname(index))) + var dts = `./${extless}.d.ts` + var dtsPath = path.join(path.dirname(file), dts) + var hasDTSFields = 'types' in data || 'typings' in data + if (!hasDTSFields && fs.existsSync(dtsPath)) { + data.types = dts.split(path.sep).join('/') + } + + cb(null, data) +} + +function makePackageId (data) { + var name = cleanString(data.name) + var ver = cleanString(data.version) + return name + '@' + ver +} + +function cleanString (str) { + return (!str || typeof (str) !== 'string') ? '' : str.trim() +} + +// /**package { "name": "foo", "version": "1.2.3", ... 
} **/ +function parseIndex (data) { + data = data.split(/^\/\*\*package(?:\s|$)/m) + + if (data.length < 2) { + return null + } + data = data[1] + data = data.split(/\*\*\/$/m) + + if (data.length < 2) { + return null + } + data = data[0] + data = data.replace(/^\s*\*/mg, '') + + try { + return safeJSON(data) + } catch (er) { + return null + } +} + +function parseError (ex, file) { + var e = new Error('Failed to parse json\n' + ex.message) + e.code = 'EJSONPARSE' + e.path = file + return e +} diff --git a/node_modules/init-package-json/node_modules/read-package-json/package.json b/node_modules/init-package-json/node_modules/read-package-json/package.json new file mode 100644 index 0000000000000..038047c970941 --- /dev/null +++ b/node_modules/init-package-json/node_modules/read-package-json/package.json @@ -0,0 +1,55 @@ +{ + "name": "read-package-json", + "version": "5.0.0", + "author": "GitHub Inc.", + "description": "The thing npm uses to read package.json files with semantics and defaults and validation", + "repository": { + "type": "git", + "url": "https://github.com/npm/read-package-json.git" + }, + "main": "lib/read-json.js", + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish && git push --follow-tags", + "release": "standard-version -s", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint '**/*.js'", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap" + }, + "dependencies": { + "glob": "^7.2.0", + "json-parse-even-better-errors": "^2.3.1", + "normalize-package-data": "^4.0.0", + "npm-normalize-package-bin": "^1.0.1" + }, + "devDependencies": { + "@npmcli/template-oss": "^2.9.2", + "tap": "^15.0.9" + }, + "license": "ISC", + "files": [ + "bin", + "lib" + ], + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "tap": { + "branches": 68, + "functions": 83, + "lines": 76, + "statements": 77 + }, + "templateOSS": { + "version": "2.9.2" + } +} diff --git a/node_modules/init-package-json/package.json b/node_modules/init-package-json/package.json index 7649c503de815..9219863cb6720 100644 --- a/node_modules/init-package-json/package.json +++ b/node_modules/init-package-json/package.json @@ -1,6 +1,6 @@ { "name": "init-package-json", - "version": "3.0.0", + "version": "3.0.1", "main": "lib/init-package-json.js", "scripts": { "test": "tap", @@ -25,14 +25,14 @@ "npm-package-arg": "^9.0.0", "promzard": "^0.3.0", "read": "^1.0.7", - "read-package-json": "^4.1.1", + "read-package-json": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" }, "devDependencies": { - "@npmcli/config": "^4.0.0", - "@npmcli/template-oss": "^2.7.1", + "@npmcli/config": "^4.0.1", + "@npmcli/template-oss": "^2.9.2", "tap": "^15.1.6" }, "engines": { @@ -58,6 +58,6 @@ "lib" ], "templateOSS": { - "version": "2.7.1" + "version": "2.9.2" } } diff --git a/package-lock.json b/package-lock.json index dff028a83a400..74e5671df9c41 100644 --- a/package-lock.json +++ b/package-lock.json @@ -108,7 +108,7 @@ "graceful-fs": "^4.2.9", "hosted-git-info": "^4.1.0", "ini": "^2.0.0", - "init-package-json": "^3.0.0", + "init-package-json": "^3.0.1", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^2.3.1", "libnpmaccess": "^6.0.1", @@ -3999,15 +3999,15 @@ } }, 
"node_modules/init-package-json": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-3.0.0.tgz", - "integrity": "sha512-b0PZaZ3lF0mKsk7QcP03LhxXttVR0kb4XIafD1HXV4JIvLhifdvFgNyXr3qSA/3DZmiskFveLP1eXfXGFybG6g==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-3.0.1.tgz", + "integrity": "sha512-TGY3Ouc/zKzanMEfA4v/4b+kaCYqMrdriQQ0iPktKeIcoIkejULFt1ounGWFoJwq2cbdCwNxZsp8vaUhuz1caQ==", "inBundle": true, "dependencies": { "npm-package-arg": "^9.0.0", "promzard": "^0.3.0", "read": "^1.0.7", - "read-package-json": "^4.1.1", + "read-package-json": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" @@ -4016,6 +4016,57 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, + "node_modules/init-package-json/node_modules/hosted-git-info": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "inBundle": true, + "dependencies": { + "lru-cache": "^7.5.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/init-package-json/node_modules/lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/init-package-json/node_modules/normalize-package-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", + "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", + "inBundle": true, + "dependencies": { + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/init-package-json/node_modules/read-package-json": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-5.0.0.tgz", + "integrity": "sha512-1cjk2MV5ONDMn34uxSID3X8NY7VKsXfJnjbcVdFMvHEnJOBzU6MJ7/3yg6QFVZDq5/1yFNrKBUK9kGnonyGP2Q==", + "inBundle": true, + "dependencies": { + "glob": "^7.2.0", + "json-parse-even-better-errors": "^2.3.1", + "normalize-package-data": "^4.0.0", + "npm-normalize-package-bin": "^1.0.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, "node_modules/inline-style-parser": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", @@ -11341,7 +11392,7 @@ "@npmcli/template-oss": "^2.4.2", "benchmark": "^2.1.4", "bin-links": "^3.0.0", - "cacache": "16.0.0", + "cacache": "^16.0.0", "chalk": "^4.1.0", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", @@ -13736,17 +13787,54 @@ "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==" }, "init-package-json": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-3.0.0.tgz", - "integrity": "sha512-b0PZaZ3lF0mKsk7QcP03LhxXttVR0kb4XIafD1HXV4JIvLhifdvFgNyXr3qSA/3DZmiskFveLP1eXfXGFybG6g==", + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/init-package-json/-/init-package-json-3.0.1.tgz", + "integrity": "sha512-TGY3Ouc/zKzanMEfA4v/4b+kaCYqMrdriQQ0iPktKeIcoIkejULFt1ounGWFoJwq2cbdCwNxZsp8vaUhuz1caQ==", "requires": { "npm-package-arg": "^9.0.0", "promzard": "^0.3.0", "read": "^1.0.7", - "read-package-json": "^4.1.1", + "read-package-json": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" + }, + "dependencies": { + "hosted-git-info": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "requires": { + "lru-cache": "^7.5.1" + } + }, + "lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" + }, + "normalize-package-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", + "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", + "requires": { + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + } + }, + "read-package-json": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-5.0.0.tgz", + "integrity": "sha512-1cjk2MV5ONDMn34uxSID3X8NY7VKsXfJnjbcVdFMvHEnJOBzU6MJ7/3yg6QFVZDq5/1yFNrKBUK9kGnonyGP2Q==", + "requires": { + "glob": "^7.2.0", + "json-parse-even-better-errors": "^2.3.1", + "normalize-package-data": "^4.0.0", + "npm-normalize-package-bin": "^1.0.1" + } + } } }, "inline-style-parser": { diff --git a/package.json b/package.json index 8cf2d111d6397..834be1ab93908 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "graceful-fs": "^4.2.9", "hosted-git-info": "^4.1.0", "ini": "^2.0.0", - "init-package-json": "^3.0.0", + "init-package-json": "^3.0.1", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^2.3.1", "libnpmaccess": "^6.0.1", From f5c38e6a59982091440f0767c0f0c5c32f3e57a2 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 15 Mar 2022 12:47:13 -0700 Subject: [PATCH 06/11] deps: read-package-json@5.0.0 --- .../read-package-json/package.json | 55 ----- .../node_modules/read-package-json/LICENSE | 0 .../read-package-json/package.json | 46 ++++ .../read-package-json/read-json.js | 0 .../read-package-json/lib/read-json.js | 0 .../node_modules/hosted-git-info/LICENSE | 0 .../hosted-git-info/lib/git-host-info.js | 0 .../hosted-git-info/lib/git-host.js | 0 .../node_modules/hosted-git-info/lib/index.js | 0 .../node_modules/hosted-git-info/package.json | 0 .../node_modules/lru-cache/LICENSE | 0 .../node_modules/lru-cache/index.js | 0 .../node_modules/lru-cache/package.json | 0 .../normalize-package-data/LICENSE | 0 .../lib/extract_description.js | 0 .../normalize-package-data/lib/fixer.js | 0 .../lib/make_warning.js | 0 .../normalize-package-data/lib/normalize.js | 0 .../normalize-package-data/lib/safe_format.js | 0 .../normalize-package-data/lib/typos.json | 0 .../lib/warning_messages.json | 0 .../normalize-package-data/package.json | 0 node_modules/read-package-json/package.json | 39 ++-- package-lock.json | 210 +++++++++--------- package.json | 2 +- 25 files changed, 177 insertions(+), 175 deletions(-) delete mode 100644 
node_modules/init-package-json/node_modules/read-package-json/package.json rename node_modules/{init-package-json => pacote}/node_modules/read-package-json/LICENSE (100%) create mode 100644 node_modules/pacote/node_modules/read-package-json/package.json rename node_modules/{ => pacote/node_modules}/read-package-json/read-json.js (100%) rename node_modules/{init-package-json/node_modules => }/read-package-json/lib/read-json.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/hosted-git-info/LICENSE (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/hosted-git-info/lib/git-host-info.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/hosted-git-info/lib/git-host.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/hosted-git-info/lib/index.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/hosted-git-info/package.json (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/lru-cache/LICENSE (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/lru-cache/index.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/lru-cache/package.json (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/LICENSE (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/lib/extract_description.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/lib/fixer.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/lib/make_warning.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/lib/normalize.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/lib/safe_format.js (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/lib/typos.json (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/lib/warning_messages.json (100%) rename node_modules/{init-package-json => read-package-json}/node_modules/normalize-package-data/package.json (100%) diff --git a/node_modules/init-package-json/node_modules/read-package-json/package.json b/node_modules/init-package-json/node_modules/read-package-json/package.json deleted file mode 100644 index 038047c970941..0000000000000 --- a/node_modules/init-package-json/node_modules/read-package-json/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "read-package-json", - "version": "5.0.0", - "author": "GitHub Inc.", - "description": "The thing npm uses to read package.json files with semantics and defaults and validation", - "repository": { - "type": "git", - "url": "https://github.com/npm/read-package-json.git" - }, - "main": "lib/read-json.js", - "scripts": { - "prerelease": "npm t", - "postrelease": "npm publish && git push --follow-tags", - "release": "standard-version -s", - "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint '**/*.js'", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "postsnap": "npm run lintfix --", - "postlint": "npm-template-check", - "template-copy": "npm-template-copy --force", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin 
--follow-tags", - "snap": "tap" - }, - "dependencies": { - "glob": "^7.2.0", - "json-parse-even-better-errors": "^2.3.1", - "normalize-package-data": "^4.0.0", - "npm-normalize-package-bin": "^1.0.1" - }, - "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "tap": "^15.0.9" - }, - "license": "ISC", - "files": [ - "bin", - "lib" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - }, - "tap": { - "branches": 68, - "functions": 83, - "lines": 76, - "statements": 77 - }, - "templateOSS": { - "version": "2.9.2" - } -} diff --git a/node_modules/init-package-json/node_modules/read-package-json/LICENSE b/node_modules/pacote/node_modules/read-package-json/LICENSE similarity index 100% rename from node_modules/init-package-json/node_modules/read-package-json/LICENSE rename to node_modules/pacote/node_modules/read-package-json/LICENSE diff --git a/node_modules/pacote/node_modules/read-package-json/package.json b/node_modules/pacote/node_modules/read-package-json/package.json new file mode 100644 index 0000000000000..c86cf45f36d55 --- /dev/null +++ b/node_modules/pacote/node_modules/read-package-json/package.json @@ -0,0 +1,46 @@ +{ + "name": "read-package-json", + "version": "4.1.2", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "description": "The thing npm uses to read package.json files with semantics and defaults and validation", + "repository": { + "type": "git", + "url": "https://github.com/npm/read-package-json.git" + }, + "main": "read-json.js", + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish && git push --follow-tags", + "release": "standard-version -s", + "test": "tap --nyc-arg=--all --coverage test/*.js", + "npmclilint": "npmcli-lint", + "lint": "npm run npmclilint -- --ignore-pattern test/fixtures \"*.*js\" \"test/**/*.*js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint --", + "postsnap": "npm run lintfix --" + }, + "dependencies": { + "glob": "^7.1.1", + "json-parse-even-better-errors": "^2.3.0", + "normalize-package-data": "^3.0.0", + "npm-normalize-package-bin": "^1.0.0" + }, + "devDependencies": { + "@npmcli/lint": "^1.0.2", + "standard-version": "^9.3.1", + "tap": "^15.0.9" + }, + "license": "ISC", + "files": [ + "read-json.js" + ], + "engines": { + "node": ">=10" + }, + "tap": { + "branches": 68, + "functions": 83, + "lines": 76, + "statements": 77 + } +} diff --git a/node_modules/read-package-json/read-json.js b/node_modules/pacote/node_modules/read-package-json/read-json.js similarity index 100% rename from node_modules/read-package-json/read-json.js rename to node_modules/pacote/node_modules/read-package-json/read-json.js diff --git a/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js b/node_modules/read-package-json/lib/read-json.js similarity index 100% rename from node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js rename to node_modules/read-package-json/lib/read-json.js diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE b/node_modules/read-package-json/node_modules/hosted-git-info/LICENSE similarity index 100% rename from node_modules/init-package-json/node_modules/hosted-git-info/LICENSE rename to node_modules/read-package-json/node_modules/hosted-git-info/LICENSE diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host-info.js b/node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host-info.js similarity index 100% rename from 
node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host-info.js rename to node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host-info.js diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host.js b/node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host.js similarity index 100% rename from node_modules/init-package-json/node_modules/hosted-git-info/lib/git-host.js rename to node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host.js diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/read-package-json/node_modules/hosted-git-info/lib/index.js similarity index 100% rename from node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js rename to node_modules/read-package-json/node_modules/hosted-git-info/lib/index.js diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/package.json b/node_modules/read-package-json/node_modules/hosted-git-info/package.json similarity index 100% rename from node_modules/init-package-json/node_modules/hosted-git-info/package.json rename to node_modules/read-package-json/node_modules/hosted-git-info/package.json diff --git a/node_modules/init-package-json/node_modules/lru-cache/LICENSE b/node_modules/read-package-json/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/init-package-json/node_modules/lru-cache/LICENSE rename to node_modules/read-package-json/node_modules/lru-cache/LICENSE diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.js b/node_modules/read-package-json/node_modules/lru-cache/index.js similarity index 100% rename from node_modules/init-package-json/node_modules/lru-cache/index.js rename to node_modules/read-package-json/node_modules/lru-cache/index.js diff --git a/node_modules/init-package-json/node_modules/lru-cache/package.json b/node_modules/read-package-json/node_modules/lru-cache/package.json similarity index 100% rename from node_modules/init-package-json/node_modules/lru-cache/package.json rename to node_modules/read-package-json/node_modules/lru-cache/package.json diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE b/node_modules/read-package-json/node_modules/normalize-package-data/LICENSE similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/LICENSE rename to node_modules/read-package-json/node_modules/normalize-package-data/LICENSE diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/extract_description.js similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js rename to node_modules/read-package-json/node_modules/normalize-package-data/lib/extract_description.js diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/fixer.js similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js rename to node_modules/read-package-json/node_modules/normalize-package-data/lib/fixer.js diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/make_warning.js similarity 
index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js rename to node_modules/read-package-json/node_modules/normalize-package-data/lib/make_warning.js diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/normalize.js similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js rename to node_modules/read-package-json/node_modules/normalize-package-data/lib/normalize.js diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/safe_format.js similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js rename to node_modules/read-package-json/node_modules/normalize-package-data/lib/safe_format.js diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/read-package-json/node_modules/normalize-package-data/lib/typos.json similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json rename to node_modules/read-package-json/node_modules/normalize-package-data/lib/typos.json diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/read-package-json/node_modules/normalize-package-data/lib/warning_messages.json similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json rename to node_modules/read-package-json/node_modules/normalize-package-data/lib/warning_messages.json diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/package.json b/node_modules/read-package-json/node_modules/normalize-package-data/package.json similarity index 100% rename from node_modules/init-package-json/node_modules/normalize-package-data/package.json rename to node_modules/read-package-json/node_modules/normalize-package-data/package.json diff --git a/node_modules/read-package-json/package.json b/node_modules/read-package-json/package.json index c86cf45f36d55..038047c970941 100644 --- a/node_modules/read-package-json/package.json +++ b/node_modules/read-package-json/package.json @@ -1,46 +1,55 @@ { "name": "read-package-json", - "version": "4.1.2", - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "version": "5.0.0", + "author": "GitHub Inc.", "description": "The thing npm uses to read package.json files with semantics and defaults and validation", "repository": { "type": "git", "url": "https://github.com/npm/read-package-json.git" }, - "main": "read-json.js", + "main": "lib/read-json.js", "scripts": { "prerelease": "npm t", "postrelease": "npm publish && git push --follow-tags", "release": "standard-version -s", - "test": "tap --nyc-arg=--all --coverage test/*.js", + "test": "tap", "npmclilint": "npmcli-lint", - "lint": "npm run npmclilint -- --ignore-pattern test/fixtures \"*.*js\" \"test/**/*.*js\"", + "lint": "eslint '**/*.js'", "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint --", - "postsnap": "npm run lintfix --" + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap" }, "dependencies": { - "glob": "^7.1.1", - "json-parse-even-better-errors": "^2.3.0", - "normalize-package-data": "^3.0.0", - "npm-normalize-package-bin": "^1.0.0" + "glob": "^7.2.0", + "json-parse-even-better-errors": "^2.3.1", + "normalize-package-data": "^4.0.0", + "npm-normalize-package-bin": "^1.0.1" }, "devDependencies": { - "@npmcli/lint": "^1.0.2", - "standard-version": "^9.3.1", + "@npmcli/template-oss": "^2.9.2", "tap": "^15.0.9" }, "license": "ISC", "files": [ - "read-json.js" + "bin", + "lib" ], "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" }, "tap": { "branches": 68, "functions": 83, "lines": 76, "statements": 77 + }, + "templateOSS": { + "version": "2.9.2" } } diff --git a/package-lock.json b/package-lock.json index 74e5671df9c41..5d70f3f4da3ba 100644 --- a/package-lock.json +++ b/package-lock.json @@ -144,7 +144,7 @@ "proc-log": "^2.0.0", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", - "read-package-json": "^4.1.2", + "read-package-json": "^5.0.0", "read-package-json-fast": "^2.0.3", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", @@ -4016,57 +4016,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/init-package-json/node_modules/hosted-git-info": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "inBundle": true, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, - "node_modules/init-package-json/node_modules/lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/init-package-json/node_modules/normalize-package-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", - "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", - "inBundle": true, - "dependencies": { - "hosted-git-info": "^5.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, - 
"node_modules/init-package-json/node_modules/read-package-json": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-5.0.0.tgz", - "integrity": "sha512-1cjk2MV5ONDMn34uxSID3X8NY7VKsXfJnjbcVdFMvHEnJOBzU6MJ7/3yg6QFVZDq5/1yFNrKBUK9kGnonyGP2Q==", - "inBundle": true, - "dependencies": { - "glob": "^7.2.0", - "json-parse-even-better-errors": "^2.3.1", - "normalize-package-data": "^4.0.0", - "npm-normalize-package-bin": "^1.0.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, "node_modules/inline-style-parser": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", @@ -6014,6 +5963,21 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, + "node_modules/pacote/node_modules/read-package-json": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-4.1.2.tgz", + "integrity": "sha512-Dqer4pqzamDE2O4M55xp1qZMuLPqi4ldk2ya648FOMHRjwMzFhuxVrG04wd0c38IsvkVdr3vgHI6z+QTPdAjrQ==", + "inBundle": true, + "dependencies": { + "glob": "^7.1.1", + "json-parse-even-better-errors": "^2.3.0", + "normalize-package-data": "^3.0.0", + "npm-normalize-package-bin": "^1.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -6485,18 +6449,18 @@ "integrity": "sha512-HJpV9bQpkl6KwjxlJcBoqu9Ba0PQg8TqSNIOrulGt54a0uup0HtevreFHzYzkm0lpnleRdNBzXznKrgxglEHQw==" }, "node_modules/read-package-json": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-4.1.2.tgz", - "integrity": "sha512-Dqer4pqzamDE2O4M55xp1qZMuLPqi4ldk2ya648FOMHRjwMzFhuxVrG04wd0c38IsvkVdr3vgHI6z+QTPdAjrQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-5.0.0.tgz", + "integrity": "sha512-1cjk2MV5ONDMn34uxSID3X8NY7VKsXfJnjbcVdFMvHEnJOBzU6MJ7/3yg6QFVZDq5/1yFNrKBUK9kGnonyGP2Q==", "inBundle": true, "dependencies": { - "glob": "^7.1.1", - "json-parse-even-better-errors": "^2.3.0", - "normalize-package-data": "^3.0.0", - "npm-normalize-package-bin": "^1.0.0" + "glob": "^7.2.0", + "json-parse-even-better-errors": "^2.3.1", + "normalize-package-data": "^4.0.0", + "npm-normalize-package-bin": "^1.0.1" }, "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" } }, "node_modules/read-package-json-fast": { @@ -6512,6 +6476,42 @@ "node": ">=10" } }, + "node_modules/read-package-json/node_modules/hosted-git-info": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "inBundle": true, + "dependencies": { + "lru-cache": "^7.5.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/read-package-json/node_modules/lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/read-package-json/node_modules/normalize-package-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", + "integrity": 
"sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", + "inBundle": true, + "dependencies": { + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, "node_modules/read-package-tree": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/read-package-tree/-/read-package-tree-5.3.1.tgz", @@ -13798,43 +13798,6 @@ "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" - }, - "dependencies": { - "hosted-git-info": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "requires": { - "lru-cache": "^7.5.1" - } - }, - "lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" - }, - "normalize-package-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", - "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", - "requires": { - "hosted-git-info": "^5.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - } - }, - "read-package-json": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-5.0.0.tgz", - "integrity": "sha512-1cjk2MV5ONDMn34uxSID3X8NY7VKsXfJnjbcVdFMvHEnJOBzU6MJ7/3yg6QFVZDq5/1yFNrKBUK9kGnonyGP2Q==", - "requires": { - "glob": "^7.2.0", - "json-parse-even-better-errors": "^2.3.1", - "normalize-package-data": "^4.0.0", - "npm-normalize-package-bin": "^1.0.1" - } - } } }, "inline-style-parser": { @@ -15522,6 +15485,19 @@ "rimraf": "^3.0.2", "ssri": "^8.0.1", "tar": "^6.1.11" + }, + "dependencies": { + "read-package-json": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-4.1.2.tgz", + "integrity": "sha512-Dqer4pqzamDE2O4M55xp1qZMuLPqi4ldk2ya648FOMHRjwMzFhuxVrG04wd0c38IsvkVdr3vgHI6z+QTPdAjrQ==", + "requires": { + "glob": "^7.1.1", + "json-parse-even-better-errors": "^2.3.0", + "normalize-package-data": "^3.0.0", + "npm-normalize-package-bin": "^1.0.0" + } + } } }, "parent-module": { @@ -15891,14 +15867,40 @@ "integrity": "sha512-HJpV9bQpkl6KwjxlJcBoqu9Ba0PQg8TqSNIOrulGt54a0uup0HtevreFHzYzkm0lpnleRdNBzXznKrgxglEHQw==" }, "read-package-json": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-4.1.2.tgz", - "integrity": "sha512-Dqer4pqzamDE2O4M55xp1qZMuLPqi4ldk2ya648FOMHRjwMzFhuxVrG04wd0c38IsvkVdr3vgHI6z+QTPdAjrQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-5.0.0.tgz", + "integrity": "sha512-1cjk2MV5ONDMn34uxSID3X8NY7VKsXfJnjbcVdFMvHEnJOBzU6MJ7/3yg6QFVZDq5/1yFNrKBUK9kGnonyGP2Q==", "requires": { - "glob": "^7.1.1", - "json-parse-even-better-errors": "^2.3.0", - "normalize-package-data": "^3.0.0", - "npm-normalize-package-bin": "^1.0.0" + "glob": "^7.2.0", + "json-parse-even-better-errors": "^2.3.1", + "normalize-package-data": "^4.0.0", + "npm-normalize-package-bin": "^1.0.1" + }, + "dependencies": { + "hosted-git-info": { + 
"version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "requires": { + "lru-cache": "^7.5.1" + } + }, + "lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" + }, + "normalize-package-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", + "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", + "requires": { + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + } + } } }, "read-package-json-fast": { diff --git a/package.json b/package.json index 834be1ab93908..2a9b5b3c94675 100644 --- a/package.json +++ b/package.json @@ -112,7 +112,7 @@ "proc-log": "^2.0.0", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", - "read-package-json": "^4.1.2", + "read-package-json": "^5.0.0", "read-package-json-fast": "^2.0.3", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", From 17c4ca2af7f3a6224c74f3a9d294c63c2bbd4c33 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 15 Mar 2022 12:50:41 -0700 Subject: [PATCH 07/11] deps: pacote@13.0.5 --- .../node_modules/read-package-json/LICENSE | 15 - .../read-package-json/package.json | 46 -- .../read-package-json/read-json.js | 603 ------------------ node_modules/pacote/package.json | 4 +- package-lock.json | 63 +- package.json | 2 +- workspaces/arborist/package.json | 2 +- workspaces/libnpmdiff/package.json | 2 +- workspaces/libnpmexec/package.json | 2 +- workspaces/libnpmpack/package.json | 2 +- 10 files changed, 24 insertions(+), 717 deletions(-) delete mode 100644 node_modules/pacote/node_modules/read-package-json/LICENSE delete mode 100644 node_modules/pacote/node_modules/read-package-json/package.json delete mode 100644 node_modules/pacote/node_modules/read-package-json/read-json.js diff --git a/node_modules/pacote/node_modules/read-package-json/LICENSE b/node_modules/pacote/node_modules/read-package-json/LICENSE deleted file mode 100644 index 052085c436514..0000000000000 --- a/node_modules/pacote/node_modules/read-package-json/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/pacote/node_modules/read-package-json/package.json b/node_modules/pacote/node_modules/read-package-json/package.json deleted file mode 100644 index c86cf45f36d55..0000000000000 --- a/node_modules/pacote/node_modules/read-package-json/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "read-package-json", - "version": "4.1.2", - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "description": "The thing npm uses to read package.json files with semantics and defaults and validation", - "repository": { - "type": "git", - "url": "https://github.com/npm/read-package-json.git" - }, - "main": "read-json.js", - "scripts": { - "prerelease": "npm t", - "postrelease": "npm publish && git push --follow-tags", - "release": "standard-version -s", - "test": "tap --nyc-arg=--all --coverage test/*.js", - "npmclilint": "npmcli-lint", - "lint": "npm run npmclilint -- --ignore-pattern test/fixtures \"*.*js\" \"test/**/*.*js\"", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint --", - "postsnap": "npm run lintfix --" - }, - "dependencies": { - "glob": "^7.1.1", - "json-parse-even-better-errors": "^2.3.0", - "normalize-package-data": "^3.0.0", - "npm-normalize-package-bin": "^1.0.0" - }, - "devDependencies": { - "@npmcli/lint": "^1.0.2", - "standard-version": "^9.3.1", - "tap": "^15.0.9" - }, - "license": "ISC", - "files": [ - "read-json.js" - ], - "engines": { - "node": ">=10" - }, - "tap": { - "branches": 68, - "functions": 83, - "lines": 76, - "statements": 77 - } -} diff --git a/node_modules/pacote/node_modules/read-package-json/read-json.js b/node_modules/pacote/node_modules/read-package-json/read-json.js deleted file mode 100644 index cb2b495521570..0000000000000 --- a/node_modules/pacote/node_modules/read-package-json/read-json.js +++ /dev/null @@ -1,603 +0,0 @@ -var fs = require('fs') - -var path = require('path') - -var glob = require('glob') -var normalizeData = require('normalize-package-data') -var safeJSON = require('json-parse-even-better-errors') -var util = require('util') -var normalizePackageBin = require('npm-normalize-package-bin') - -module.exports = readJson - -// put more stuff on here to customize. -readJson.extraSet = [ - bundleDependencies, - gypfile, - serverjs, - scriptpath, - authors, - readme, - mans, - bins, - githead, - fillTypes, -] - -var typoWarned = {} -var cache = {} - -function readJson (file, log_, strict_, cb_) { - var log, strict, cb - for (var i = 1; i < arguments.length - 1; i++) { - if (typeof arguments[i] === 'boolean') { - strict = arguments[i] - } else if (typeof arguments[i] === 'function') { - log = arguments[i] - } - } - - if (!log) { - log = function () {} - } - cb = arguments[arguments.length - 1] - - readJson_(file, log, strict, cb) -} - -function readJson_ (file, log, strict, cb) { - fs.readFile(file, 'utf8', function (er, d) { - parseJson(file, er, d, log, strict, cb) - }) -} - -function stripBOM (content) { - // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) - // because the buffer-to-string conversion in `fs.readFileSync()` - // translates it to FEFF, the UTF-16 BOM. 
- if (content.charCodeAt(0) === 0xFEFF) { - content = content.slice(1) - } - return content -} - -function jsonClone (obj) { - if (obj == null) { - return obj - } else if (Array.isArray(obj)) { - var newarr = new Array(obj.length) - for (var ii in obj) { - newarr[ii] = obj[ii] - } - } else if (typeof obj === 'object') { - var newobj = {} - for (var kk in obj) { - newobj[kk] = jsonClone[kk] - } - } else { - return obj - } -} - -function parseJson (file, er, d, log, strict, cb) { - if (er && er.code === 'ENOENT') { - return fs.stat(path.dirname(file), function (err, stat) { - if (!err && stat && !stat.isDirectory()) { - // ENOTDIR isn't used on Windows, but npm expects it. - er = Object.create(er) - er.code = 'ENOTDIR' - return cb(er) - } else { - return indexjs(file, er, log, strict, cb) - } - }) - } - if (er) { - return cb(er) - } - - if (cache[d]) { - return cb(null, jsonClone(cache[d])) - } - - var data - - try { - data = safeJSON(stripBOM(d)) - for (var key in data) { - if (/^_/.test(key)) { - delete data[key] - } - } - } catch (er) { - data = parseIndex(d) - if (!data) { - return cb(parseError(er, file)) - } - } - - extrasCached(file, d, data, log, strict, cb) -} - -function extrasCached (file, d, data, log, strict, cb) { - extras(file, data, log, strict, function (err, data) { - if (!err) { - cache[d] = jsonClone(data) - } - cb(err, data) - }) -} - -function indexjs (file, er, log, strict, cb) { - if (path.basename(file) === 'index.js') { - return cb(er) - } - - var index = path.resolve(path.dirname(file), 'index.js') - fs.readFile(index, 'utf8', function (er2, d) { - if (er2) { - return cb(er) - } - - if (cache[d]) { - return cb(null, cache[d]) - } - - var data = parseIndex(d) - if (!data) { - return cb(er) - } - - extrasCached(file, d, data, log, strict, cb) - }) -} - -readJson.extras = extras -function extras (file, data, log_, strict_, cb_) { - var log, strict, cb - for (var i = 2; i < arguments.length - 1; i++) { - if (typeof arguments[i] === 'boolean') { - strict = arguments[i] - } else if (typeof arguments[i] === 'function') { - log = arguments[i] - } - } - - if (!log) { - log = function () {} - } - cb = arguments[i] - - var set = readJson.extraSet - var n = set.length - var errState = null - set.forEach(function (fn) { - fn(file, data, then) - }) - - function then (er) { - if (errState) { - return - } - if (er) { - return cb(errState = er) - } - if (--n > 0) { - return - } - final(file, data, log, strict, cb) - } -} - -function scriptpath (file, data, cb) { - if (!data.scripts) { - return cb(null, data) - } - var k = Object.keys(data.scripts) - k.forEach(scriptpath_, data.scripts) - cb(null, data) -} - -function scriptpath_ (key) { - var s = this[key] - // This is never allowed, and only causes problems - if (typeof s !== 'string') { - return delete this[key] - } - - var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ - if (s.match(spre)) { - this[key] = this[key].replace(spre, '') - } -} - -function gypfile (file, data, cb) { - var dir = path.dirname(file) - var s = data.scripts || {} - if (s.install || s.preinstall) { - return cb(null, data) - } - - glob('*.gyp', { cwd: dir }, function (er, files) { - if (er) { - return cb(er) - } - if (data.gypfile === false) { - return cb(null, data) - } - gypfile_(file, data, files, cb) - }) -} - -function gypfile_ (file, data, files, cb) { - if (!files.length) { - return cb(null, data) - } - var s = data.scripts || {} - s.install = 'node-gyp rebuild' - data.scripts = s - data.gypfile = true - return cb(null, data) -} - -function serverjs 
(file, data, cb) { - var dir = path.dirname(file) - var s = data.scripts || {} - if (s.start) { - return cb(null, data) - } - glob('server.js', { cwd: dir }, function (er, files) { - if (er) { - return cb(er) - } - serverjs_(file, data, files, cb) - }) -} - -function serverjs_ (file, data, files, cb) { - if (!files.length) { - return cb(null, data) - } - var s = data.scripts || {} - s.start = 'node server.js' - data.scripts = s - return cb(null, data) -} - -function authors (file, data, cb) { - if (data.contributors) { - return cb(null, data) - } - var af = path.resolve(path.dirname(file), 'AUTHORS') - fs.readFile(af, 'utf8', function (er, ad) { - // ignore error. just checking it. - if (er) { - return cb(null, data) - } - authors_(file, data, ad, cb) - }) -} - -function authors_ (file, data, ad, cb) { - ad = ad.split(/\r?\n/g).map(function (line) { - return line.replace(/^\s*#.*$/, '').trim() - }).filter(function (line) { - return line - }) - data.contributors = ad - return cb(null, data) -} - -function readme (file, data, cb) { - if (data.readme) { - return cb(null, data) - } - var dir = path.dirname(file) - var globOpts = { cwd: dir, nocase: true, mark: true } - glob('{README,README.*}', globOpts, function (er, files) { - if (er) { - return cb(er) - } - // don't accept directories. - files = files.filter(function (file) { - return !file.match(/\/$/) - }) - if (!files.length) { - return cb() - } - var fn = preferMarkdownReadme(files) - var rm = path.resolve(dir, fn) - readme_(file, data, rm, cb) - }) -} - -function preferMarkdownReadme (files) { - var fallback = 0 - var re = /\.m?a?r?k?d?o?w?n?$/i - for (var i = 0; i < files.length; i++) { - if (files[i].match(re)) { - return files[i] - } else if (files[i].match(/README$/)) { - fallback = i - } - } - // prefer README.md, followed by README; otherwise, return - // the first filename (which could be README) - return files[fallback] -} - -function readme_ (file, data, rm, cb) { - var rmfn = path.basename(rm) - fs.readFile(rm, 'utf8', function (er, rm) { - // maybe not readable, or something. - if (er) { - return cb() - } - data.readme = rm - data.readmeFilename = rmfn - return cb(er, data) - }) -} - -function mans (file, data, cb) { - let cwd = data.directories && data.directories.man - if (data.man || !cwd) { - return cb(null, data) - } - const dirname = path.dirname(file) - cwd = path.resolve(path.dirname(file), cwd) - glob('**/*.[0-9]', { cwd }, function (er, mans) { - if (er) { - return cb(er) - } - data.man = mans.map(man => path.relative(dirname, path.join(cwd, man))) - return cb(null, data) - }) -} - -function bins (file, data, cb) { - data = normalizePackageBin(data) - - var m = data.directories && data.directories.bin - if (data.bin || !m) { - return cb(null, data) - } - - m = path.resolve(path.dirname(file), m) - glob('**', { cwd: m }, function (er, bins) { - if (er) { - return cb(er) - } - bins_(file, data, bins, cb) - }) -} - -function bins_ (file, data, bins, cb) { - var m = (data.directories && data.directories.bin) || '.' 
- data.bin = bins.reduce(function (acc, mf) { - if (mf && mf.charAt(0) !== '.') { - var f = path.basename(mf) - acc[f] = path.join(m, mf) - } - return acc - }, {}) - return cb(null, normalizePackageBin(data)) -} - -function bundleDependencies (file, data, cb) { - var bd = 'bundleDependencies' - var bdd = 'bundledDependencies' - // normalize key name - if (data[bdd] !== undefined) { - if (data[bd] === undefined) { - data[bd] = data[bdd] - } - delete data[bdd] - } - if (data[bd] === false) { - delete data[bd] - } else if (data[bd] === true) { - data[bd] = Object.keys(data.dependencies || {}) - } else if (data[bd] !== undefined && !Array.isArray(data[bd])) { - delete data[bd] - } - return cb(null, data) -} - -function githead (file, data, cb) { - if (data.gitHead) { - return cb(null, data) - } - var dir = path.dirname(file) - var head = path.resolve(dir, '.git/HEAD') - fs.readFile(head, 'utf8', function (er, head) { - if (er) { - var parent = path.dirname(dir) - if (parent === dir) { - return cb(null, data) - } - return githead(dir, data, cb) - } - githead_(data, dir, head, cb) - }) -} - -function githead_ (data, dir, head, cb) { - if (!head.match(/^ref: /)) { - data.gitHead = head.trim() - return cb(null, data) - } - var headRef = head.replace(/^ref: /, '').trim() - var headFile = path.resolve(dir, '.git', headRef) - fs.readFile(headFile, 'utf8', function (er, head) { - if (er || !head) { - var packFile = path.resolve(dir, '.git/packed-refs') - return fs.readFile(packFile, 'utf8', function (er, refs) { - if (er || !refs) { - return cb(null, data) - } - refs = refs.split('\n') - for (var i = 0; i < refs.length; i++) { - var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) - if (match && match[2].trim() === headRef) { - data.gitHead = match[1] - break - } - } - return cb(null, data) - }) - } - head = head.replace(/^ref: /, '').trim() - data.gitHead = head - return cb(null, data) - }) -} - -/** - * Warn if the bin references don't point to anything. This might be better in - * normalize-package-data if it had access to the file path. - */ -function checkBinReferences_ (file, data, warn, cb) { - if (!(data.bin instanceof Object)) { - return cb() - } - - var keys = Object.keys(data.bin) - var keysLeft = keys.length - if (!keysLeft) { - return cb() - } - - function handleExists (relName, result) { - keysLeft-- - if (!result) { - warn('No bin file found at ' + relName) - } - if (!keysLeft) { - cb() - } - } - - keys.forEach(function (key) { - var dirName = path.dirname(file) - var relName = data.bin[key] - /* istanbul ignore if - impossible, bins have been normalized */ - if (typeof relName !== 'string') { - var msg = 'Bin filename for ' + key + - ' is not a string: ' + util.inspect(relName) - warn(msg) - delete data.bin[key] - handleExists(relName, true) - return - } - var binPath = path.resolve(dirName, relName) - fs.stat(binPath, (err) => handleExists(relName, !err)) - }) -} - -function final (file, data, log, strict, cb) { - var pId = makePackageId(data) - - function warn (msg) { - if (typoWarned[pId]) { - return - } - if (log) { - log('package.json', pId, msg) - } - } - - try { - normalizeData(data, warn, strict) - } catch (error) { - return cb(error) - } - - checkBinReferences_(file, data, warn, function () { - typoWarned[pId] = true - cb(null, data) - }) -} - -function fillTypes (file, data, cb) { - var index = data.main ? 
data.main : 'index.js' - - if (typeof index !== 'string') { - return cb(new TypeError('The "main" attribute must be of type string.')) - } - - // TODO exports is much more complicated than this in verbose format - // We need to support for instance - - // "exports": { - // ".": [ - // { - // "default": "./lib/npm.js" - // }, - // "./lib/npm.js" - // ], - // "./package.json": "./package.json" - // }, - // as well as conditional exports - - // if (data.exports && typeof data.exports === 'string') { - // index = data.exports - // } - - // if (data.exports && data.exports['.']) { - // index = data.exports['.'] - // if (typeof index !== 'string') { - // } - // } - - var extless = - path.join(path.dirname(index), path.basename(index, path.extname(index))) - var dts = `./${extless}.d.ts` - var dtsPath = path.join(path.dirname(file), dts) - var hasDTSFields = 'types' in data || 'typings' in data - if (!hasDTSFields && fs.existsSync(dtsPath)) { - data.types = dts - } - - cb(null, data) -} - -function makePackageId (data) { - var name = cleanString(data.name) - var ver = cleanString(data.version) - return name + '@' + ver -} - -function cleanString (str) { - return (!str || typeof (str) !== 'string') ? '' : str.trim() -} - -// /**package { "name": "foo", "version": "1.2.3", ... } **/ -function parseIndex (data) { - data = data.split(/^\/\*\*package(?:\s|$)/m) - - if (data.length < 2) { - return null - } - data = data[1] - data = data.split(/\*\*\/$/m) - - if (data.length < 2) { - return null - } - data = data[0] - data = data.replace(/^\s*\*/mg, '') - - try { - return safeJSON(data) - } catch (er) { - return null - } -} - -function parseError (ex, file) { - var e = new Error('Failed to parse json\n' + ex.message) - e.code = 'EJSONPARSE' - e.path = file - return e -} diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index a527602ea7cc2..f49c23147a80c 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "13.0.4", + "version": "13.0.5", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -56,7 +56,7 @@ "npm-registry-fetch": "^13.0.1", "proc-log": "^2.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^4.1.2", + "read-package-json": "^5.0.0", "read-package-json-fast": "^2.0.3", "rimraf": "^3.0.2", "ssri": "^8.0.1", diff --git a/package-lock.json b/package-lock.json index 5d70f3f4da3ba..271e2d62a6ea3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -139,7 +139,7 @@ "npm-user-validate": "^1.0.1", "npmlog": "^6.0.1", "opener": "^1.5.2", - "pacote": "^13.0.4", + "pacote": "^13.0.5", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "qrcode-terminal": "^0.12.0", @@ -5441,7 +5441,6 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "inBundle": true, "dependencies": { "hosted-git-info": "^4.0.1", "is-core-module": "^2.5.0", @@ -5929,9 +5928,9 @@ } }, "node_modules/pacote": { - "version": "13.0.4", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.4.tgz", - "integrity": "sha512-uhkG1ZclRmL+9O2vfrDUIDSTPIbSClCe9BUySy8IAkuF80eG51yZB+9hfStOF/O0LwVn7PcjqdGe+SJPxRp7jg==", + "version": "13.0.5", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.5.tgz", + "integrity": 
"sha512-6CYfot3/rUAn3qqzF2d/jrrXm5HlBtvaSgfmg0VtOUAdJ8fbSq21BJwftMGArkL71yXHIbUJ7Bt5B04547HELA==", "inBundle": true, "dependencies": { "@npmcli/git": "^3.0.0", @@ -5950,7 +5949,7 @@ "npm-registry-fetch": "^13.0.1", "proc-log": "^2.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^4.1.2", + "read-package-json": "^5.0.0", "read-package-json-fast": "^2.0.3", "rimraf": "^3.0.2", "ssri": "^8.0.1", @@ -5963,21 +5962,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/pacote/node_modules/read-package-json": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-4.1.2.tgz", - "integrity": "sha512-Dqer4pqzamDE2O4M55xp1qZMuLPqi4ldk2ya648FOMHRjwMzFhuxVrG04wd0c38IsvkVdr3vgHI6z+QTPdAjrQ==", - "inBundle": true, - "dependencies": { - "glob": "^7.1.1", - "json-parse-even-better-errors": "^2.3.0", - "normalize-package-data": "^3.0.0", - "npm-normalize-package-bin": "^1.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -10457,7 +10441,7 @@ "npm-pick-manifest": "^7.0.0", "npm-registry-fetch": "^13.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.2", + "pacote": "^13.0.5", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "promise-all-reject-late": "^1.0.0", @@ -10529,7 +10513,7 @@ "diff": "^5.0.0", "minimatch": "^3.0.4", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.4", + "pacote": "^13.0.5", "tar": "^6.1.0" }, "devDependencies": { @@ -10552,7 +10536,7 @@ "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.4", + "pacote": "^13.0.5", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", @@ -10694,7 +10678,7 @@ "dependencies": { "@npmcli/run-script": "^3.0.0", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.4" + "pacote": "^13.0.5" }, "devDependencies": { "@npmcli/template-oss": "^2.4.2", @@ -11407,7 +11391,7 @@ "npm-pick-manifest": "^7.0.0", "npm-registry-fetch": "^13.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.2", + "pacote": "13.0.5", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "promise-all-reject-late": "^1.0.0", @@ -14378,7 +14362,7 @@ "eslint": "^8.1.0", "minimatch": "^3.0.4", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.4", + "pacote": "13.0.5", "tap": "^15.0.9", "tar": "^6.1.0" } @@ -14395,7 +14379,7 @@ "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.4", + "pacote": "13.0.5", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", @@ -14502,7 +14486,7 @@ "@npmcli/template-oss": "^2.4.2", "nock": "^13.0.7", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.4", + "pacote": "13.0.5", "tap": "^15.0.0" } }, @@ -15460,9 +15444,9 @@ } }, "pacote": { - "version": "13.0.4", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.4.tgz", - "integrity": "sha512-uhkG1ZclRmL+9O2vfrDUIDSTPIbSClCe9BUySy8IAkuF80eG51yZB+9hfStOF/O0LwVn7PcjqdGe+SJPxRp7jg==", + "version": "13.0.5", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-13.0.5.tgz", + "integrity": "sha512-6CYfot3/rUAn3qqzF2d/jrrXm5HlBtvaSgfmg0VtOUAdJ8fbSq21BJwftMGArkL71yXHIbUJ7Bt5B04547HELA==", "requires": { "@npmcli/git": "^3.0.0", "@npmcli/installed-package-contents": "^1.0.7", @@ -15480,24 +15464,11 @@ "npm-registry-fetch": "^13.0.1", "proc-log": "^2.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^4.1.2", + "read-package-json": "^5.0.0", "read-package-json-fast": "^2.0.3", 
"rimraf": "^3.0.2", "ssri": "^8.0.1", "tar": "^6.1.11" - }, - "dependencies": { - "read-package-json": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-4.1.2.tgz", - "integrity": "sha512-Dqer4pqzamDE2O4M55xp1qZMuLPqi4ldk2ya648FOMHRjwMzFhuxVrG04wd0c38IsvkVdr3vgHI6z+QTPdAjrQ==", - "requires": { - "glob": "^7.1.1", - "json-parse-even-better-errors": "^2.3.0", - "normalize-package-data": "^3.0.0", - "npm-normalize-package-bin": "^1.0.0" - } - } } }, "parent-module": { diff --git a/package.json b/package.json index 2a9b5b3c94675..dad87cd2cf4df 100644 --- a/package.json +++ b/package.json @@ -107,7 +107,7 @@ "npm-user-validate": "^1.0.1", "npmlog": "^6.0.1", "opener": "^1.5.2", - "pacote": "^13.0.4", + "pacote": "^13.0.5", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "qrcode-terminal": "^0.12.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index e1b3c2e9ba624..b4bd00db06880 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -25,7 +25,7 @@ "npm-pick-manifest": "^7.0.0", "npm-registry-fetch": "^13.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.2", + "pacote": "^13.0.5", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "promise-all-reject-late": "^1.0.0", diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json index 37a98a20825c4..08089683c5d4a 100644 --- a/workspaces/libnpmdiff/package.json +++ b/workspaces/libnpmdiff/package.json @@ -59,7 +59,7 @@ "diff": "^5.0.0", "minimatch": "^3.0.4", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.4", + "pacote": "^13.0.5", "tar": "^6.1.0" }, "templateOSS": { diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index e0705a2742078..f6faad30514f7 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -62,7 +62,7 @@ "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.0", "npmlog": "^6.0.1", - "pacote": "^13.0.4", + "pacote": "^13.0.5", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json index 690db7cbbac7d..3a342deda0de0 100644 --- a/workspaces/libnpmpack/package.json +++ b/workspaces/libnpmpack/package.json @@ -42,7 +42,7 @@ "dependencies": { "@npmcli/run-script": "^3.0.0", "npm-package-arg": "^9.0.0", - "pacote": "^13.0.4" + "pacote": "^13.0.5" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16" From 46621ae6ff80386d202b90c27966173ef3a0147d Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 15 Mar 2022 12:52:43 -0700 Subject: [PATCH 08/11] deps: npm-package-arg@9.0.1 --- .../node_modules/hosted-git-info/LICENSE | 13 + .../hosted-git-info/lib/git-host-info.js | 185 ++++++ .../hosted-git-info/lib/git-host.js | 110 ++++ .../node_modules/hosted-git-info/lib/index.js | 244 +++++++ .../node_modules/hosted-git-info/package.json | 56 ++ .../node_modules/lru-cache/LICENSE | 15 + .../node_modules/lru-cache/index.js | 615 ++++++++++++++++++ .../node_modules/lru-cache/package.json | 43 ++ node_modules/npm-package-arg/package.json | 8 +- package-lock.json | 82 ++- package.json | 2 +- workspaces/libnpmaccess/package.json | 2 +- workspaces/libnpmdiff/package.json | 2 +- workspaces/libnpmexec/package.json | 2 +- workspaces/libnpmpack/package.json | 2 +- workspaces/libnpmpublish/package.json | 2 +- 16 files changed, 1350 insertions(+), 33 deletions(-) create mode 100644 
node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/package.json create mode 100644 node_modules/npm-package-arg/node_modules/lru-cache/LICENSE create mode 100644 node_modules/npm-package-arg/node_modules/lru-cache/index.js create mode 100644 node_modules/npm-package-arg/node_modules/lru-cache/package.json diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE b/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE new file mode 100644 index 0000000000000..45055763dc838 --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js new file mode 100644 index 0000000000000..9a9720fa3c339 --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js @@ -0,0 +1,185 @@ +/* eslint-disable max-len */ +'use strict' +const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' +const maybeEncode = (arg) => arg ? 
encodeURIComponent(arg) : '' + +const defaults = { + sshtemplate: ({ domain, user, project, committish }) => `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'master')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/raw/${maybeEncode(committish) || 'master'}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) => `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment, +} + +const gitHosts = {} +gitHosts.github = Object.assign({}, defaults, { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. + protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + filetemplate: ({ auth, user, project, committish, path }) => `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish) || 'master'}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish } + }, +}) + +gitHosts.bitbucket = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/get/${maybeEncode(committish) || 'master'}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +}) + +gitHosts.gitlab = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + httpstemplate: ({ 
auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish) || 'master'}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +}) + +gitHosts.gist = Object.assign({}, defaults, { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + sshtemplate: ({ domain, project, committish }) => `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, project, committish, path, hashformat }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + }, +}) + +gitHosts.sourcehut = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'https:'], + domain: 'git.sr.ht', + treepath: 'tree', + browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'main')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'main'}/${path}`, + httpstemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => 
`https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'main'}.tar.gz`, + bugstemplate: ({ domain, user, project }) => `https://todo.sr.ht/${user}/${project}`, + docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + + // tarball url + if (['archive'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +}) + +const names = Object.keys(gitHosts) +gitHosts.byShortcut = {} +gitHosts.byDomain = {} +for (const name of names) { + gitHosts.byShortcut[`${name}:`] = name + gitHosts.byDomain[gitHosts[name].domain] = name +} + +function formatHashFragment (fragment) { + return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') +} + +module.exports = gitHosts diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js new file mode 100644 index 0000000000000..8a975e92e58bb --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js @@ -0,0 +1,110 @@ +'use strict' +const gitHosts = require('./git-host-info.js') + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + Object.assign(this, gitHosts[type]) + this.type = type + this.user = user + this.auth = auth + this.project = project + this.committish = committish + this.default = defaultRepresentation + this.opts = opts + } + + hash () { + return this.committish ? `#${this.committish}` : '' + } + + ssh (opts) { + return this._fill(this.sshtemplate, opts) + } + + _fill (template, opts) { + if (typeof template === 'function') { + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } + + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? 
result.slice(4) : result + } + + return null + } + + sshurl (opts) { + return this._fill(this.sshurltemplate, opts) + } + + browse (path, fragment, opts) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this._fill(this.browsetemplate, path) + } + + if (typeof fragment !== 'string') { + opts = fragment + fragment = null + } + return this._fill(this.browsefiletemplate, { ...opts, fragment, path }) + } + + docs (opts) { + return this._fill(this.docstemplate, opts) + } + + bugs (opts) { + return this._fill(this.bugstemplate, opts) + } + + https (opts) { + return this._fill(this.httpstemplate, opts) + } + + git (opts) { + return this._fill(this.gittemplate, opts) + } + + shortcut (opts) { + return this._fill(this.shortcuttemplate, opts) + } + + path (opts) { + return this._fill(this.pathtemplate, opts) + } + + tarball (opts) { + return this._fill(this.tarballtemplate, { ...opts, noCommittish: false }) + } + + file (path, opts) { + return this._fill(this.filetemplate, { ...opts, path }) + } + + getDefaultRepresentation () { + return this.default + } + + toString (opts) { + if (this.default && typeof this[this.default] === 'function') { + return this[this.default](opts) + } + + return this.sshurl(opts) + } +} +module.exports = GitHost diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js new file mode 100644 index 0000000000000..8bce6b3c28d51 --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js @@ -0,0 +1,244 @@ +'use strict' +const url = require('url') +const gitHosts = require('./git-host-info.js') +const GitHost = module.exports = require('./git-host.js') +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) + +const protocolToRepresentationMap = { + 'git+ssh:': 'sshurl', + 'git+https:': 'https', + 'ssh:': 'sshurl', + 'git:': 'git', +} + +function protocolToRepresentation (protocol) { + return protocolToRepresentationMap[protocol] || protocol.slice(0, -1) +} + +const authProtocols = { + 'git:': true, + 'https:': true, + 'git+https:': true, + 'http:': true, + 'git+http:': true, +} + +const knownProtocols = Object.keys(gitHosts.byShortcut) + .concat(['http:', 'https:', 'git:', 'git+ssh:', 'git+https:', 'ssh:']) + +module.exports.fromUrl = function (giturl, opts) { + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) + + if (!cache.has(key)) { + cache.set(key, fromUrl(giturl, opts)) + } + + return cache.get(key) +} + +function fromUrl (giturl, opts) { + if (!giturl) { + return + } + + const url = isGitHubShorthand(giturl) ? 'github:' + giturl : correctProtocol(giturl) + const parsed = parseGitUrl(url) + if (!parsed) { + return parsed + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = + gitHosts.byDomain[parsed.hostname.startsWith('www.') ? + parsed.hostname.slice(4) : + parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (authProtocols[parsed.protocol] && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? 
parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) + } + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null + } + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return + } + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocolToRepresentation(parsed.protocol) + } + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } + + return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) +} + +// accepts input like git:github.com:user/repo and inserts the // after the first : +const correctProtocol = (arg) => { + const firstColon = arg.indexOf(':') + const proto = arg.slice(0, firstColon + 1) + if (knownProtocols.includes(proto)) { + return arg + } + + const firstAt = arg.indexOf('@') + if (firstAt > -1) { + if (firstAt > firstColon) { + return `git+ssh://${arg}` + } else { + return arg + } + } + + const doubleSlash = arg.indexOf('//') + if (doubleSlash === firstColon + 1) { + return arg + } + + return arg.slice(0, firstColon + 1) + '//' + arg.slice(firstColon + 1) +} + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . because that's probably a relative file path + // it cannot start with an @ because that's a scoped package if it passes the other tests + // it cannot contain a : before a # because that tells us that there's a protocol + // a second / may not exist before a # + const firstHash = arg.indexOf('#') + const firstSlash = arg.indexOf('/') + const secondSlash = arg.indexOf('/', firstSlash + 1) + const firstColon = arg.indexOf(':') + const firstSpace = /\s/.exec(arg) + const firstAt = arg.indexOf('@') + + const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) + const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) + const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) + const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) + const hasSlash = firstSlash > 0 + // if a # is found, what we really want to know is that the character + // immediately before # is not a / + const doesNotEndWithSlash = firstHash > -1 ? 
arg[firstHash - 1] !== '/' : !arg.endsWith('/') + const doesNotStartWithDot = !arg.startsWith('.') + + return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && + doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && + secondSlashOnlyAfterHash +} + +// attempt to correct an scp style url so that it will parse with `new URL()` +const correctUrl = (giturl) => { + const firstAt = giturl.indexOf('@') + const lastHash = giturl.lastIndexOf('#') + let firstColon = giturl.indexOf(':') + let lastColon = giturl.lastIndexOf(':', lastHash > -1 ? lastHash : Infinity) + + let corrected + if (lastColon > firstAt) { + // the last : comes after the first @ (or there is no @) + // like it would in: + // proto://hostname.com:user/repo + // username@hostname.com:user/repo + // :password@hostname.com:user/repo + // username:password@hostname.com:user/repo + // proto://username@hostname.com:user/repo + // proto://:password@hostname.com:user/repo + // proto://username:password@hostname.com:user/repo + // then we replace the last : with a / to create a valid path + corrected = giturl.slice(0, lastColon) + '/' + giturl.slice(lastColon + 1) + // // and we find our new : positions + firstColon = corrected.indexOf(':') + lastColon = corrected.lastIndexOf(':') + } + + if (firstColon === -1 && giturl.indexOf('//') === -1) { + // we have no : at all + // as it would be in: + // username@hostname.com/user/repo + // then we prepend a protocol + corrected = `git+ssh://${corrected}` + } + + return corrected +} + +// try to parse the url as its given to us, if that throws +// then we try to clean the url and parse that result instead +// THIS FUNCTION SHOULD NEVER THROW +const parseGitUrl = (giturl) => { + let result + try { + result = new url.URL(giturl) + } catch (err) {} + + if (result) { + return result + } + + const correctedUrl = correctUrl(giturl) + try { + result = new url.URL(correctedUrl) + } catch (err) {} + + return result +} diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json b/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json new file mode 100644 index 0000000000000..0153b0852cbf4 --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json @@ -0,0 +1,56 @@ +{ + "name": "hosted-git-info", + "version": "5.0.0", + "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", + "main": "./lib/index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/hosted-git-info.git" + }, + "keywords": [ + "git", + "github", + "bitbucket", + "gitlab" + ], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/hosted-git-info/issues" + }, + "homepage": "https://github.com/npm/hosted-git-info", + "scripts": { + "posttest": "npm run lint", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "snap": "tap", + "test": "tap", + "test:coverage": "tap --coverage-report=html", + "lint": "eslint '**/*.js'", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "lintfix": "npm run lint -- --fix" + }, + "dependencies": { + "lru-cache": "^7.5.1" + }, + "devDependencies": { + "@npmcli/template-oss": "^2.9.2", + "tap": "^15.1.6" + }, + "files": [ + "bin", + "lib" + ], + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "tap": { + "color": 1, + "coverage": true + }, + "templateOSS": { + "version": "2.9.2" + } +} diff --git 
a/node_modules/npm-package-arg/node_modules/lru-cache/LICENSE b/node_modules/npm-package-arg/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..9b58a3e03d1df --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-package-arg/node_modules/lru-cache/index.js b/node_modules/npm-package-arg/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..e37f51616452e --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/lru-cache/index.js @@ -0,0 +1,615 @@ +const perf = typeof performance === 'object' && performance && + typeof performance.now === 'function' ? performance : Date + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} +const shouldWarn = (code) => typeof process === 'object' && + process && + !(process.noDeprecation || warned.has(code)) +const warn = (code, what, instead, fn) => { + warned.add(code) + process.emitWarning(`The ${what} is deprecated. Please use ${instead} instead.`, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => !isPosInt(max) ? null +: max <= Math.pow(2, 8) ? Uint8Array +: max <= Math.pow(2, 16) ? Uint16Array +: max <= Math.pow(2, 32) ? Uint32Array +: max <= Number.MAX_SAFE_INTEGER ? 
ZeroArray +: null + +class ZeroArray extends Array { + constructor (size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor (max) { + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push (n) { + this.heap[this.length++] = n + } + pop () { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor (options = {}) { + const { + max, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize, + sizeCalculation, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { + length, + maxAge, + stale, + } = options instanceof LRUCache ? {} : options + + if (!isPosInt(max)) { + throw new TypeError('max option must be an integer') + } + + const UintArray = getUintArray(max) + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize || 0 + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize') + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculating set to non-function') + } + } + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + + if (this.maxSize) { + if (!isPosInt(this.maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified') + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.updateAgeOnGet = !!updateAgeOnGet + this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified') + } + this.initializeTTLTracking() + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + initializeTTLTracking () { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + this.setItemTTL = (index, ttl) => { + this.starts[index] = ttl !== 0 ? perf.now() : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + this.updateItemAge = (index) => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. 
+ let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout(() => cachedNow = 0, this.ttlResolution) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + this.isStale = (index) => { + return this.ttls[index] !== 0 && this.starts[index] !== 0 && + ((cachedNow || getNow()) - this.starts[index] > this.ttls[index]) + } + } + updateItemAge (index) {} + setItemTTL (index, ttl) {} + isStale (index) { return false } + + initializeSizeTracking () { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => this.calculatedSize -= this.sizes[index] + this.addItemSize = (index, v, k, size, sizeCalculation) => { + const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0) + this.sizes[index] = isPosInt(s) ? s : 0 + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict() + } + this.calculatedSize += this.sizes[index] + } + this.delete = k => { + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + this.calculatedSize -= this.sizes[index] + } + } + return LRUCache.prototype.delete.call(this, k) + } + } + removeItemSize (index) {} + addItemSize (index, v, k, size, sizeCalculation) {} + + *indexes ({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail, j; true; ) { + if (!this.isValidIndex(i)) { + break + } + j = i === this.head + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes ({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head, j; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + // either the tail now, or WAS the tail, and deleted + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex (index) { + return this.keyMap.get(this.keyList[index]) === index + } + + *entries () { + for (const i of this.indexes()) { + yield [this.keyList[i], this.valList[i]] + } + } + *rentries () { + for (const i of this.rindexes()) { + yield [this.keyList[i], this.valList[i]] + } + } + + *keys () { + for (const i of this.indexes()) { + yield this.keyList[i] + } + } + *rkeys () { + for (const i of this.rindexes()) { + yield this.keyList[i] + } + } + + *values () { + for (const i of this.indexes()) { + yield this.valList[i] + } + } + *rvalues () { + for (const i of this.rindexes()) { + yield this.valList[i] + } + } + + [Symbol.iterator] () { + return this.entries() + } + + find (fn, getOptions = {}) { + for (const i of this.indexes()) { + if (fn(this.valList[i], this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach (fn, thisp = this) { + for (const i of this.indexes()) { + fn.call(thisp, this.valList[i], this.keyList[i], this) + } + } + + rforEach (fn, thisp = this) { + for (const i of this.rindexes()) { + fn.call(thisp, this.valList[i], this.keyList[i], this) + } + } + + get prune () { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale () { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump () { + const arr = [] + for (const i of this.indexes()) { + const key = this.keyList[i] + const value = 
this.valList[i] + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load (arr) { + this.clear() + for (const [key, entry] of arr) { + this.set(key, entry.value, entry) + } + } + + dispose (v, k, reason) {} + + set (k, v, { + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + } = {}) { + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size ++ + this.addItemSize(index, v, k, size, sizeCalculation) + noUpdateTTL = false + } else { + // update + const oldVal = this.valList[index] + if (v !== oldVal) { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, v, k, size, sizeCalculation) + } + this.moveToTail(index) + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl) + } + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex () { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max) { + return this.evict() + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop () { + if (this.size) { + const val = this.valList[this.head] + this.evict() + return val + } + } + + evict () { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + this.removeItemSize(head) + this.head = this.next[head] + this.keyMap.delete(k) + this.size -- + return head + } + + has (k) { + return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) + } + + // like get(), but without any LRU updating or TTL expiration + peek (k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + return this.valList[index] + } + } + + get (k, { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (this.isStale(index)) { + const value = allowStale ? 
this.valList[index] : undefined + this.delete(k) + return value + } else { + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return this.valList[index] + } + } + } + + connect (p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail (index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del () { + deprecatedMethod('del', 'delete') + return this.delete + } + delete (k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + this.dispose(this.valList[index], k, 'delete') + if (this.disposeAfter) { + this.disposed.push([this.valList[index], k, 'delete']) + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size -- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear () { + if (this.dispose !== LRUCache.prototype.dispose) { + for (const index of this.rindexes({ allowStale: true })) { + this.dispose(this.valList[index], this.keyList[index], 'delete') + } + } + if (this.disposeAfter) { + for (const index of this.rindexes({ allowStale: true })) { + this.disposed.push([this.valList[index], this.keyList[index], 'delete']) + } + } + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + get reset () { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length () { + deprecatedProperty('length', 'size') + return this.size + } +} + +module.exports = LRUCache diff --git a/node_modules/npm-package-arg/node_modules/lru-cache/package.json b/node_modules/npm-package-arg/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..a62f74c2b648a --- /dev/null +++ b/node_modules/npm-package-arg/node_modules/lru-cache/package.json @@ -0,0 +1,43 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.5.1", + "author": "Isaac Z. 
Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "build": "", + "test": "tap", + "snap": "tap", + "size": "size-limit", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "benchmark": "^2.1.4", + "size-limit": "^7.0.8", + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "index.js" + ], + "engines": { + "node": ">=12" + }, + "tap": { + "coverage-map": "map.js" + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/node_modules/npm-package-arg/package.json b/node_modules/npm-package-arg/package.json index b0821312a7f0d..25356af373982 100644 --- a/node_modules/npm-package-arg/package.json +++ b/node_modules/npm-package-arg/package.json @@ -1,6 +1,6 @@ { "name": "npm-package-arg", - "version": "9.0.0", + "version": "9.0.1", "description": "Parse the things that can be arguments to `npm install`", "main": "./lib/npa.js", "directories": { @@ -11,12 +11,12 @@ "lib" ], "dependencies": { - "hosted-git-info": "^4.1.0", + "hosted-git-info": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-name": "^3.0.0" }, "devDependencies": { - "@npmcli/template-oss": "^2.7.1", + "@npmcli/template-oss": "^2.9.2", "tap": "^15.1.6" }, "scripts": { @@ -50,6 +50,6 @@ "branches": 97 }, "templateOSS": { - "version": "2.7.1" + "version": "2.9.2" } } diff --git a/package-lock.json b/package-lock.json index 271e2d62a6ea3..7ac0c36039878 100644 --- a/package-lock.json +++ b/package-lock.json @@ -132,7 +132,7 @@ "nopt": "^5.0.0", "npm-audit-report": "^2.1.5", "npm-install-checks": "^4.0.0", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-pick-manifest": "^7.0.0", "npm-profile": "^6.0.2", "npm-registry-fetch": "^13.0.1", @@ -5506,12 +5506,12 @@ "inBundle": true }, "node_modules/npm-package-arg": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-9.0.0.tgz", - "integrity": "sha512-yhzXxeor+Zfhe5MGwPdDumz6HtNlj2pMekWB95IX3CC6uDNgde0oPKHDCLDPoJqQfd0HqAWt+y4Hs5m7CK1+9Q==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-9.0.1.tgz", + "integrity": "sha512-Xs9wznfEAmZAR61qsYH3iN24V/qMYYkvAR5CRQNMvC6PjN2fHtO8y9XP/xdp5K+Icx+u1wMBMgWRPCmAEChSog==", "inBundle": true, "dependencies": { - "hosted-git-info": "^4.1.0", + "hosted-git-info": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-name": "^3.0.0" }, @@ -5519,6 +5519,27 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, + "node_modules/npm-package-arg/node_modules/hosted-git-info": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "inBundle": true, + "dependencies": { + "lru-cache": "^7.5.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/npm-package-arg/node_modules/lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/npm-packlist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-4.0.0.tgz", @@ 
-10476,7 +10497,7 @@ "dependencies": { "aproba": "^2.0.0", "minipass": "^3.1.1", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0" }, "devDependencies": { @@ -10512,7 +10533,7 @@ "binary-extensions": "^2.2.0", "diff": "^5.0.0", "minimatch": "^3.0.4", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "pacote": "^13.0.5", "tar": "^6.1.0" }, @@ -10534,7 +10555,7 @@ "@npmcli/run-script": "^3.0.0", "chalk": "^4.1.0", "mkdirp-infer-owner": "^2.0.0", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npmlog": "^6.0.1", "pacote": "^13.0.5", "proc-log": "^2.0.0", @@ -10677,7 +10698,7 @@ "license": "ISC", "dependencies": { "@npmcli/run-script": "^3.0.0", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "pacote": "^13.0.5" }, "devDependencies": { @@ -10694,7 +10715,7 @@ "license": "ISC", "dependencies": { "normalize-package-data": "^3.0.2", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "semver": "^7.1.3", "ssri": "^8.0.1" @@ -11391,7 +11412,7 @@ "npm-pick-manifest": "^7.0.0", "npm-registry-fetch": "^13.0.0", "npmlog": "^6.0.1", - "pacote": "13.0.5", + "pacote": "^13.0.5", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", "promise-all-reject-late": "^1.0.0", @@ -14332,7 +14353,7 @@ "aproba": "^2.0.0", "minipass": "^3.1.1", "nock": "^12.0.1", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "tap": "^15.1.0" }, @@ -14361,8 +14382,8 @@ "diff": "^5.0.0", "eslint": "^8.1.0", "minimatch": "^3.0.4", - "npm-package-arg": "^9.0.0", - "pacote": "13.0.5", + "npm-package-arg": "^9.0.1", + "pacote": "^13.0.5", "tap": "^15.0.9", "tar": "^6.1.0" } @@ -14377,9 +14398,9 @@ "bin-links": "^3.0.0", "chalk": "^4.1.0", "mkdirp-infer-owner": "^2.0.0", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npmlog": "^6.0.1", - "pacote": "13.0.5", + "pacote": "^13.0.5", "proc-log": "^2.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", @@ -14485,8 +14506,8 @@ "@npmcli/run-script": "^3.0.0", "@npmcli/template-oss": "^2.4.2", "nock": "^13.0.7", - "npm-package-arg": "^9.0.0", - "pacote": "13.0.5", + "npm-package-arg": "^9.0.1", + "pacote": "^13.0.5", "tap": "^15.0.0" } }, @@ -14498,7 +14519,7 @@ "lodash.clonedeep": "^4.5.0", "nock": "^12.0.2", "normalize-package-data": "^3.0.2", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "semver": "^7.1.3", "ssri": "^8.0.1", @@ -15135,13 +15156,28 @@ "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==" }, "npm-package-arg": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-9.0.0.tgz", - "integrity": "sha512-yhzXxeor+Zfhe5MGwPdDumz6HtNlj2pMekWB95IX3CC6uDNgde0oPKHDCLDPoJqQfd0HqAWt+y4Hs5m7CK1+9Q==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-9.0.1.tgz", + "integrity": "sha512-Xs9wznfEAmZAR61qsYH3iN24V/qMYYkvAR5CRQNMvC6PjN2fHtO8y9XP/xdp5K+Icx+u1wMBMgWRPCmAEChSog==", "requires": { - "hosted-git-info": "^4.1.0", + "hosted-git-info": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-name": "^3.0.0" + }, + "dependencies": { + "hosted-git-info": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "requires": { + "lru-cache": 
"^7.5.1" + } + }, + "lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" + } } }, "npm-packlist": { diff --git a/package.json b/package.json index dad87cd2cf4df..29d47da5498b5 100644 --- a/package.json +++ b/package.json @@ -100,7 +100,7 @@ "nopt": "^5.0.0", "npm-audit-report": "^2.1.5", "npm-install-checks": "^4.0.0", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-pick-manifest": "^7.0.0", "npm-profile": "^6.0.2", "npm-registry-fetch": "^13.0.1", diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json index ac5e8e7b21247..6ee76c86cc7a7 100644 --- a/workspaces/libnpmaccess/package.json +++ b/workspaces/libnpmaccess/package.json @@ -33,7 +33,7 @@ "dependencies": { "aproba": "^2.0.0", "minipass": "^3.1.1", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0" }, "engines": { diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json index 08089683c5d4a..8bf47df1200bf 100644 --- a/workspaces/libnpmdiff/package.json +++ b/workspaces/libnpmdiff/package.json @@ -58,7 +58,7 @@ "binary-extensions": "^2.2.0", "diff": "^5.0.0", "minimatch": "^3.0.4", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "pacote": "^13.0.5", "tar": "^6.1.0" }, diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index f6faad30514f7..68b88578b4764 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -60,7 +60,7 @@ "@npmcli/run-script": "^3.0.0", "chalk": "^4.1.0", "mkdirp-infer-owner": "^2.0.0", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npmlog": "^6.0.1", "pacote": "^13.0.5", "proc-log": "^2.0.0", diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json index 3a342deda0de0..c25e1ef112904 100644 --- a/workspaces/libnpmpack/package.json +++ b/workspaces/libnpmpack/package.json @@ -41,7 +41,7 @@ "homepage": "https://npmjs.com/package/libnpmpack", "dependencies": { "@npmcli/run-script": "^3.0.0", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "pacote": "^13.0.5" }, "engines": { diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json index 441a10b17c0a4..50b470e134b29 100644 --- a/workspaces/libnpmpublish/package.json +++ b/workspaces/libnpmpublish/package.json @@ -45,7 +45,7 @@ "homepage": "https://npmjs.com/package/libnpmpublish", "dependencies": { "normalize-package-data": "^3.0.2", - "npm-package-arg": "^9.0.0", + "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "semver": "^7.1.3", "ssri": "^8.0.1" From ecbacb3d3e41951d478d6eb8a65c224c5ada1531 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 15 Mar 2022 12:53:32 -0700 Subject: [PATCH 09/11] deps: normalize-package-data@4.0.0 --- node_modules/normalize-package-data/AUTHORS | 4 - .../normalize-package-data/lib/fixer.js | 21 +- .../node_modules/hosted-git-info/LICENSE | 0 .../hosted-git-info/lib/git-host-info.js | 0 .../hosted-git-info/lib/git-host.js | 0 .../node_modules/hosted-git-info/lib/index.js | 0 .../node_modules/hosted-git-info/package.json | 0 .../node_modules/lru-cache/LICENSE | 0 .../node_modules/lru-cache/index.js | 0 .../node_modules/lru-cache/package.json | 0 .../normalize-package-data/package.json | 41 +- .../normalize-package-data/LICENSE | 15 - .../lib/extract_description.js | 22 - 
.../normalize-package-data/lib/fixer.js | 475 ------------------ .../lib/make_warning.js | 22 - .../normalize-package-data/lib/normalize.js | 48 -- .../normalize-package-data/lib/safe_format.js | 11 - .../normalize-package-data/lib/typos.json | 25 - .../lib/warning_messages.json | 30 -- .../normalize-package-data/package.json | 52 -- package-lock.json | 133 ++--- workspaces/libnpmpublish/package.json | 2 +- 22 files changed, 92 insertions(+), 809 deletions(-) delete mode 100644 node_modules/normalize-package-data/AUTHORS rename node_modules/{read-package-json => normalize-package-data}/node_modules/hosted-git-info/LICENSE (100%) rename node_modules/{read-package-json => normalize-package-data}/node_modules/hosted-git-info/lib/git-host-info.js (100%) rename node_modules/{read-package-json => normalize-package-data}/node_modules/hosted-git-info/lib/git-host.js (100%) rename node_modules/{read-package-json => normalize-package-data}/node_modules/hosted-git-info/lib/index.js (100%) rename node_modules/{read-package-json => normalize-package-data}/node_modules/hosted-git-info/package.json (100%) rename node_modules/{read-package-json => normalize-package-data}/node_modules/lru-cache/LICENSE (100%) rename node_modules/{read-package-json => normalize-package-data}/node_modules/lru-cache/index.js (100%) rename node_modules/{read-package-json => normalize-package-data}/node_modules/lru-cache/package.json (100%) delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/LICENSE delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/lib/extract_description.js delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/lib/fixer.js delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/lib/make_warning.js delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/lib/normalize.js delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/lib/safe_format.js delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/lib/typos.json delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/lib/warning_messages.json delete mode 100644 node_modules/read-package-json/node_modules/normalize-package-data/package.json diff --git a/node_modules/normalize-package-data/AUTHORS b/node_modules/normalize-package-data/AUTHORS deleted file mode 100644 index 66282ba1d1197..0000000000000 --- a/node_modules/normalize-package-data/AUTHORS +++ /dev/null @@ -1,4 +0,0 @@ -# Names sorted by how much code was originally theirs. -Isaac Z. 
Schlueter -Meryn Stol -Robert Kowalski diff --git a/node_modules/normalize-package-data/lib/fixer.js b/node_modules/normalize-package-data/lib/fixer.js index 97c26b26b461d..0846f2c045a6e 100644 --- a/node_modules/normalize-package-data/lib/fixer.js +++ b/node_modules/normalize-package-data/lib/fixer.js @@ -8,6 +8,8 @@ var extractDescription = require('./extract_description') var url = require('url') var typos = require('./typos.json') +var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) + module.exports = { // default warning function warn: function () {}, @@ -127,7 +129,7 @@ module.exports = { if (!data.dependencies) { data.dependencies = {} } - if (Object.prototype.hasOwnProperty.call(data.dependencies, bd)) { + if (!Object.prototype.hasOwnProperty.call(data.dependencies, bd)) { this.warn('nonDependencyBundleDependency', bd) data.dependencies[bd] = '*' } @@ -213,7 +215,7 @@ module.exports = { fixNameField: function (data, options) { if (typeof options === 'boolean') { - options = {strict: options} + options = { strict: options } } else if (typeof options === 'undefined') { options = {} } @@ -261,16 +263,15 @@ module.exports = { if (!data.bugs && data.repository && data.repository.url) { var hosted = hostedGitInfo.fromUrl(data.repository.url) if (hosted && hosted.bugs()) { - data.bugs = {url: hosted.bugs()} + data.bugs = { url: hosted.bugs() } } } else if (data.bugs) { - var emailRe = /^.+@.*\..+$/ if (typeof data.bugs === 'string') { - if (emailRe.test(data.bugs)) { - data.bugs = {email: data.bugs} + if (isEmail(data.bugs)) { + data.bugs = { email: data.bugs } /* eslint-disable-next-line node/no-deprecated-api */ } else if (url.parse(data.bugs).protocol) { - data.bugs = {url: data.bugs} + data.bugs = { url: data.bugs } } else { this.warn('nonEmailUrlBugsString') } @@ -287,7 +288,7 @@ module.exports = { } } if (oldBugs.email) { - if (typeof (oldBugs.email) === 'string' && emailRe.test(oldBugs.email)) { + if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { data.bugs.email = oldBugs.email } else { this.warn('nonEmailBugsEmailField') @@ -399,8 +400,8 @@ function parsePerson (person) { return person } var name = person.match(/^([^(<]+)/) - var url = person.match(/\(([^)]+)\)/) - var email = person.match(/<([^>]+)>/) + var url = person.match(/\(([^()]+)\)/) + var email = person.match(/<([^<>]+)>/) var obj = {} if (name && name[0].trim()) { obj.name = name[0].trim() diff --git a/node_modules/read-package-json/node_modules/hosted-git-info/LICENSE b/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE similarity index 100% rename from node_modules/read-package-json/node_modules/hosted-git-info/LICENSE rename to node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE diff --git a/node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host-info.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host-info.js similarity index 100% rename from node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host-info.js rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host-info.js diff --git a/node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host.js similarity index 100% rename from node_modules/read-package-json/node_modules/hosted-git-info/lib/git-host.js rename to 
node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host.js diff --git a/node_modules/read-package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js similarity index 100% rename from node_modules/read-package-json/node_modules/hosted-git-info/lib/index.js rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js diff --git a/node_modules/read-package-json/node_modules/hosted-git-info/package.json b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json similarity index 100% rename from node_modules/read-package-json/node_modules/hosted-git-info/package.json rename to node_modules/normalize-package-data/node_modules/hosted-git-info/package.json diff --git a/node_modules/read-package-json/node_modules/lru-cache/LICENSE b/node_modules/normalize-package-data/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/read-package-json/node_modules/lru-cache/LICENSE rename to node_modules/normalize-package-data/node_modules/lru-cache/LICENSE diff --git a/node_modules/read-package-json/node_modules/lru-cache/index.js b/node_modules/normalize-package-data/node_modules/lru-cache/index.js similarity index 100% rename from node_modules/read-package-json/node_modules/lru-cache/index.js rename to node_modules/normalize-package-data/node_modules/lru-cache/index.js diff --git a/node_modules/read-package-json/node_modules/lru-cache/package.json b/node_modules/normalize-package-data/node_modules/lru-cache/package.json similarity index 100% rename from node_modules/read-package-json/node_modules/lru-cache/package.json rename to node_modules/normalize-package-data/node_modules/lru-cache/package.json diff --git a/node_modules/normalize-package-data/package.json b/node_modules/normalize-package-data/package.json index 17f167ea13f51..a6f1244eb5a25 100644 --- a/node_modules/normalize-package-data/package.json +++ b/node_modules/normalize-package-data/package.json @@ -1,7 +1,7 @@ { "name": "normalize-package-data", - "version": "3.0.3", - "author": "Meryn Stol ", + "version": "4.0.0", + "author": "GitHub Inc.", "description": "Normalizes data that can be found in package.json files.", "license": "BSD-2-Clause", "repository": { @@ -13,29 +13,40 @@ "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "preversion": "npm test", - "test": "tap test/*.js --branches 85 --functions 90 --lines 85 --statements 85", + "test": "tap", "npmclilint": "npmcli-lint", - "lint": "npm run npmclilint -- \"lib/**/*.*js\" \"test/**/*.*js\"", + "lint": "eslint '**/*.js'", "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint --", - "postsnap": "npm run lintfix --" + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "snap": "tap" }, "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" }, "devDependencies": { - "@npmcli/lint": "^1.0.2", + "@npmcli/template-oss": "^2.9.2", "tap": "^15.0.9" }, "files": [ - "lib/*.js", - "lib/*.json", - "AUTHORS" + "bin", + "lib" ], "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" + }, + "templateOSS": { + "version": "2.9.2" + }, + "tap": { + "branches": 86, + 
"functions": 92, + "lines": 86, + "statements": 86 } } diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/LICENSE b/node_modules/read-package-json/node_modules/normalize-package-data/LICENSE deleted file mode 100644 index 19d1364a8ac08..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -This package contains code originally written by Isaac Z. Schlueter. -Used with permission. - -Copyright (c) Meryn Stol ("Author") -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/extract_description.js deleted file mode 100644 index bf9896812e5f5..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/lib/extract_description.js +++ /dev/null @@ -1,22 +0,0 @@ -module.exports = extractDescription - -// Extracts description from contents of a readme file in markdown format -function extractDescription (d) { - if (!d) { - return - } - if (d === 'ERROR: No README data found!') { - return - } - // the first block of text before the first heading - // that isn't the first line heading - d = d.trim().split('\n') - for (var s = 0; d[s] && d[s].trim().match(/^(#|$)/); s++) { - ; - } - var l = d.length - for (var e = s + 1; e < l && d[e].trim(); e++) { - ; - } - return d.slice(s, e).join(' ').trim() -} diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/fixer.js deleted file mode 100644 index 0846f2c045a6e..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/lib/fixer.js +++ /dev/null @@ -1,475 +0,0 @@ -var isValidSemver = require('semver/functions/valid') -var cleanSemver = require('semver/functions/clean') -var validateLicense = require('validate-npm-package-license') -var hostedGitInfo = require('hosted-git-info') -var isBuiltinModule = require('is-core-module') -var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'] -var extractDescription = require('./extract_description') -var url = require('url') -var typos = require('./typos.json') - -var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) - -module.exports = { - // default warning function - warn: function () {}, - - fixRepositoryField: function (data) { - if (data.repositories) { - this.warn('repositories') - data.repository = data.repositories[0] - } - if (!data.repository) { - return this.warn('missingRepository') - } - if (typeof data.repository === 'string') { - data.repository = { - type: 'git', - url: data.repository, - } - } - var r = data.repository.url || '' - if (r) { - var hosted = hostedGitInfo.fromUrl(r) - if (hosted) { - r = data.repository.url - = hosted.getDefaultRepresentation() === 'shortcut' ? 
hosted.https() : hosted.toString() - } - } - - if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) { - this.warn('brokenGitUrl', r) - } - }, - - fixTypos: function (data) { - Object.keys(typos.topLevel).forEach(function (d) { - if (Object.prototype.hasOwnProperty.call(data, d)) { - this.warn('typo', d, typos.topLevel[d]) - } - }, this) - }, - - fixScriptsField: function (data) { - if (!data.scripts) { - return - } - if (typeof data.scripts !== 'object') { - this.warn('nonObjectScripts') - delete data.scripts - return - } - Object.keys(data.scripts).forEach(function (k) { - if (typeof data.scripts[k] !== 'string') { - this.warn('nonStringScript') - delete data.scripts[k] - } else if (typos.script[k] && !data.scripts[typos.script[k]]) { - this.warn('typo', k, typos.script[k], 'scripts') - } - }, this) - }, - - fixFilesField: function (data) { - var files = data.files - if (files && !Array.isArray(files)) { - this.warn('nonArrayFiles') - delete data.files - } else if (data.files) { - data.files = data.files.filter(function (file) { - if (!file || typeof file !== 'string') { - this.warn('invalidFilename', file) - return false - } else { - return true - } - }, this) - } - }, - - fixBinField: function (data) { - if (!data.bin) { - return - } - if (typeof data.bin === 'string') { - var b = {} - var match - if (match = data.name.match(/^@[^/]+[/](.*)$/)) { - b[match[1]] = data.bin - } else { - b[data.name] = data.bin - } - data.bin = b - } - }, - - fixManField: function (data) { - if (!data.man) { - return - } - if (typeof data.man === 'string') { - data.man = [data.man] - } - }, - fixBundleDependenciesField: function (data) { - var bdd = 'bundledDependencies' - var bd = 'bundleDependencies' - if (data[bdd] && !data[bd]) { - data[bd] = data[bdd] - delete data[bdd] - } - if (data[bd] && !Array.isArray(data[bd])) { - this.warn('nonArrayBundleDependencies') - delete data[bd] - } else if (data[bd]) { - data[bd] = data[bd].filter(function (bd) { - if (!bd || typeof bd !== 'string') { - this.warn('nonStringBundleDependency', bd) - return false - } else { - if (!data.dependencies) { - data.dependencies = {} - } - if (!Object.prototype.hasOwnProperty.call(data.dependencies, bd)) { - this.warn('nonDependencyBundleDependency', bd) - data.dependencies[bd] = '*' - } - return true - } - }, this) - } - }, - - fixDependencies: function (data, strict) { - objectifyDeps(data, this.warn) - addOptionalDepsToDeps(data, this.warn) - this.fixBundleDependenciesField(data) - - ;['dependencies', 'devDependencies'].forEach(function (deps) { - if (!(deps in data)) { - return - } - if (!data[deps] || typeof data[deps] !== 'object') { - this.warn('nonObjectDependencies', deps) - delete data[deps] - return - } - Object.keys(data[deps]).forEach(function (d) { - var r = data[deps][d] - if (typeof r !== 'string') { - this.warn('nonStringDependency', d, JSON.stringify(r)) - delete data[deps][d] - } - var hosted = hostedGitInfo.fromUrl(data[deps][d]) - if (hosted) { - data[deps][d] = hosted.toString() - } - }, this) - }, this) - }, - - fixModulesField: function (data) { - if (data.modules) { - this.warn('deprecatedModules') - delete data.modules - } - }, - - fixKeywordsField: function (data) { - if (typeof data.keywords === 'string') { - data.keywords = data.keywords.split(/,\s+/) - } - if (data.keywords && !Array.isArray(data.keywords)) { - delete data.keywords - this.warn('nonArrayKeywords') - } else if (data.keywords) { - data.keywords = data.keywords.filter(function (kw) { - if (typeof kw !== 'string' || !kw) { - 
this.warn('nonStringKeyword') - return false - } else { - return true - } - }, this) - } - }, - - fixVersionField: function (data, strict) { - // allow "loose" semver 1.0 versions in non-strict mode - // enforce strict semver 2.0 compliance in strict mode - var loose = !strict - if (!data.version) { - data.version = '' - return true - } - if (!isValidSemver(data.version, loose)) { - throw new Error('Invalid version: "' + data.version + '"') - } - data.version = cleanSemver(data.version, loose) - return true - }, - - fixPeople: function (data) { - modifyPeople(data, unParsePerson) - modifyPeople(data, parsePerson) - }, - - fixNameField: function (data, options) { - if (typeof options === 'boolean') { - options = { strict: options } - } else if (typeof options === 'undefined') { - options = {} - } - var strict = options.strict - if (!data.name && !strict) { - data.name = '' - return - } - if (typeof data.name !== 'string') { - throw new Error('name field must be a string.') - } - if (!strict) { - data.name = data.name.trim() - } - ensureValidName(data.name, strict, options.allowLegacyCase) - if (isBuiltinModule(data.name)) { - this.warn('conflictingName', data.name) - } - }, - - fixDescriptionField: function (data) { - if (data.description && typeof data.description !== 'string') { - this.warn('nonStringDescription') - delete data.description - } - if (data.readme && !data.description) { - data.description = extractDescription(data.readme) - } - if (data.description === undefined) { - delete data.description - } - if (!data.description) { - this.warn('missingDescription') - } - }, - - fixReadmeField: function (data) { - if (!data.readme) { - this.warn('missingReadme') - data.readme = 'ERROR: No README data found!' - } - }, - - fixBugsField: function (data) { - if (!data.bugs && data.repository && data.repository.url) { - var hosted = hostedGitInfo.fromUrl(data.repository.url) - if (hosted && hosted.bugs()) { - data.bugs = { url: hosted.bugs() } - } - } else if (data.bugs) { - if (typeof data.bugs === 'string') { - if (isEmail(data.bugs)) { - data.bugs = { email: data.bugs } - /* eslint-disable-next-line node/no-deprecated-api */ - } else if (url.parse(data.bugs).protocol) { - data.bugs = { url: data.bugs } - } else { - this.warn('nonEmailUrlBugsString') - } - } else { - bugsTypos(data.bugs, this.warn) - var oldBugs = data.bugs - data.bugs = {} - if (oldBugs.url) { - /* eslint-disable-next-line node/no-deprecated-api */ - if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { - data.bugs.url = oldBugs.url - } else { - this.warn('nonUrlBugsUrlField') - } - } - if (oldBugs.email) { - if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { - data.bugs.email = oldBugs.email - } else { - this.warn('nonEmailBugsEmailField') - } - } - } - if (!data.bugs.email && !data.bugs.url) { - delete data.bugs - this.warn('emptyNormalizedBugs') - } - } - }, - - fixHomepageField: function (data) { - if (!data.homepage && data.repository && data.repository.url) { - var hosted = hostedGitInfo.fromUrl(data.repository.url) - if (hosted && hosted.docs()) { - data.homepage = hosted.docs() - } - } - if (!data.homepage) { - return - } - - if (typeof data.homepage !== 'string') { - this.warn('nonUrlHomepage') - return delete data.homepage - } - /* eslint-disable-next-line node/no-deprecated-api */ - if (!url.parse(data.homepage).protocol) { - data.homepage = 'http://' + data.homepage - } - }, - - fixLicenseField: function (data) { - const license = data.license || data.licence - if 
(!license) { - return this.warn('missingLicense') - } - if ( - typeof (license) !== 'string' || - license.length < 1 || - license.trim() === '' - ) { - return this.warn('invalidLicense') - } - if (!validateLicense(license).validForNewPackages) { - return this.warn('invalidLicense') - } - }, -} - -function isValidScopedPackageName (spec) { - if (spec.charAt(0) !== '@') { - return false - } - - var rest = spec.slice(1).split('/') - if (rest.length !== 2) { - return false - } - - return rest[0] && rest[1] && - rest[0] === encodeURIComponent(rest[0]) && - rest[1] === encodeURIComponent(rest[1]) -} - -function isCorrectlyEncodedName (spec) { - return !spec.match(/[/@\s+%:]/) && - spec === encodeURIComponent(spec) -} - -function ensureValidName (name, strict, allowLegacyCase) { - if (name.charAt(0) === '.' || - !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) || - (strict && (!allowLegacyCase) && name !== name.toLowerCase()) || - name.toLowerCase() === 'node_modules' || - name.toLowerCase() === 'favicon.ico') { - throw new Error('Invalid name: ' + JSON.stringify(name)) - } -} - -function modifyPeople (data, fn) { - if (data.author) { - data.author = fn(data.author) - }['maintainers', 'contributors'].forEach(function (set) { - if (!Array.isArray(data[set])) { - return - } - data[set] = data[set].map(fn) - }) - return data -} - -function unParsePerson (person) { - if (typeof person === 'string') { - return person - } - var name = person.name || '' - var u = person.url || person.web - var url = u ? (' (' + u + ')') : '' - var e = person.email || person.mail - var email = e ? (' <' + e + '>') : '' - return name + email + url -} - -function parsePerson (person) { - if (typeof person !== 'string') { - return person - } - var name = person.match(/^([^(<]+)/) - var url = person.match(/\(([^()]+)\)/) - var email = person.match(/<([^<>]+)>/) - var obj = {} - if (name && name[0].trim()) { - obj.name = name[0].trim() - } - if (email) { - obj.email = email[1] - } - if (url) { - obj.url = url[1] - } - return obj -} - -function addOptionalDepsToDeps (data, warn) { - var o = data.optionalDependencies - if (!o) { - return - } - var d = data.dependencies || {} - Object.keys(o).forEach(function (k) { - d[k] = o[k] - }) - data.dependencies = d -} - -function depObjectify (deps, type, warn) { - if (!deps) { - return {} - } - if (typeof deps === 'string') { - deps = deps.trim().split(/[\n\r\s\t ,]+/) - } - if (!Array.isArray(deps)) { - return deps - } - warn('deprecatedArrayDependencies', type) - var o = {} - deps.filter(function (d) { - return typeof d === 'string' - }).forEach(function (d) { - d = d.trim().split(/(:?[@\s><=])/) - var dn = d.shift() - var dv = d.join('') - dv = dv.trim() - dv = dv.replace(/^@/, '') - o[dn] = dv - }) - return o -} - -function objectifyDeps (data, warn) { - depTypes.forEach(function (type) { - if (!data[type]) { - return - } - data[type] = depObjectify(data[type], type, warn) - }) -} - -function bugsTypos (bugs, warn) { - if (!bugs) { - return - } - Object.keys(bugs).forEach(function (k) { - if (typos.bugs[k]) { - warn('typo', k, typos.bugs[k], 'bugs') - bugs[typos.bugs[k]] = bugs[k] - delete bugs[k] - } - }) -} diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/make_warning.js deleted file mode 100644 index 3be9c86539952..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/lib/make_warning.js +++ /dev/null @@ 
-1,22 +0,0 @@ -var util = require('util') -var messages = require('./warning_messages.json') - -module.exports = function () { - var args = Array.prototype.slice.call(arguments, 0) - var warningName = args.shift() - if (warningName === 'typo') { - return makeTypoWarning.apply(null, args) - } else { - var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'" - args.unshift(msgTemplate) - return util.format.apply(null, args) - } -} - -function makeTypoWarning (providedName, probableName, field) { - if (field) { - providedName = field + "['" + providedName + "']" - probableName = field + "['" + probableName + "']" - } - return util.format(messages.typo, providedName, probableName) -} diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/normalize.js deleted file mode 100644 index bf71d2c1e2235..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/lib/normalize.js +++ /dev/null @@ -1,48 +0,0 @@ -module.exports = normalize - -var fixer = require('./fixer') -normalize.fixer = fixer - -var makeWarning = require('./make_warning') - -var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts', - 'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license'] -var otherThingsToFix = ['dependencies', 'people', 'typos'] - -var thingsToFix = fieldsToFix.map(function (fieldName) { - return ucFirst(fieldName) + 'Field' -}) -// two ways to do this in CoffeeScript on only one line, sub-70 chars: -// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field" -// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix) -thingsToFix = thingsToFix.concat(otherThingsToFix) - -function normalize (data, warn, strict) { - if (warn === true) { - warn = null - strict = true - } - if (!strict) { - strict = false - } - if (!warn || data.private) { - warn = function (msg) { /* noop */ } - } - - if (data.scripts && - data.scripts.install === 'node-gyp rebuild' && - !data.scripts.preinstall) { - data.gypfile = true - } - fixer.warn = function () { - warn(makeWarning.apply(null, arguments)) - } - thingsToFix.forEach(function (thingName) { - fixer['fix' + ucFirst(thingName)](data, strict) - }) - data._id = data.name + '@' + data.version -} - -function ucFirst (string) { - return string.charAt(0).toUpperCase() + string.slice(1) -} diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/read-package-json/node_modules/normalize-package-data/lib/safe_format.js deleted file mode 100644 index 5fc888e5450cd..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/lib/safe_format.js +++ /dev/null @@ -1,11 +0,0 @@ -var util = require('util') - -module.exports = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.forEach(function (arg) { - if (!arg) { - throw new TypeError('Bad arguments.') - } - }) - return util.format.apply(null, arguments) -} diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/read-package-json/node_modules/normalize-package-data/lib/typos.json deleted file mode 100644 index 7f9dd283b30ff..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/lib/typos.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "topLevel": { - "dependancies": "dependencies" - ,"dependecies": "dependencies" - 
,"depdenencies": "dependencies" - ,"devEependencies": "devDependencies" - ,"depends": "dependencies" - ,"dev-dependencies": "devDependencies" - ,"devDependences": "devDependencies" - ,"devDepenencies": "devDependencies" - ,"devdependencies": "devDependencies" - ,"repostitory": "repository" - ,"repo": "repository" - ,"prefereGlobal": "preferGlobal" - ,"hompage": "homepage" - ,"hampage": "homepage" - ,"autohr": "author" - ,"autor": "author" - ,"contributers": "contributors" - ,"publicationConfig": "publishConfig" - ,"script": "scripts" - }, - "bugs": { "web": "url", "name": "url" }, - "script": { "server": "start", "tests": "test" } -} diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/read-package-json/node_modules/normalize-package-data/lib/warning_messages.json deleted file mode 100644 index 4890f506ed965..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/lib/warning_messages.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field" - ,"missingRepository": "No repository field." - ,"brokenGitUrl": "Probably broken git url: %s" - ,"nonObjectScripts": "scripts must be an object" - ,"nonStringScript": "script values must be string commands" - ,"nonArrayFiles": "Invalid 'files' member" - ,"invalidFilename": "Invalid filename in 'files' list: %s" - ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names" - ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s" - ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s" - ,"nonObjectDependencies": "%s field must be an object" - ,"nonStringDependency": "Invalid dependency: %s %s" - ,"deprecatedArrayDependencies": "specifying %s as array is deprecated" - ,"deprecatedModules": "modules field is deprecated" - ,"nonArrayKeywords": "keywords should be an array of strings" - ,"nonStringKeyword": "keywords should be an array of strings" - ,"conflictingName": "%s is also the name of a node core module." - ,"nonStringDescription": "'description' field should be a string" - ,"missingDescription": "No description" - ,"missingReadme": "No README data" - ,"missingLicense": "No license field." - ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}" - ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted." - ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted." - ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted." - ,"nonUrlHomepage": "homepage field must be a string url. Deleted." - ,"invalidLicense": "license should be a valid SPDX license expression" - ,"typo": "%s should probably be %s." 
-} diff --git a/node_modules/read-package-json/node_modules/normalize-package-data/package.json b/node_modules/read-package-json/node_modules/normalize-package-data/package.json deleted file mode 100644 index a6f1244eb5a25..0000000000000 --- a/node_modules/read-package-json/node_modules/normalize-package-data/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "normalize-package-data", - "version": "4.0.0", - "author": "GitHub Inc.", - "description": "Normalizes data that can be found in package.json files.", - "license": "BSD-2-Clause", - "repository": { - "type": "git", - "url": "git://github.com/npm/normalize-package-data.git" - }, - "main": "lib/normalize.js", - "scripts": { - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preversion": "npm test", - "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint '**/*.js'", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "postsnap": "npm run lintfix --", - "postlint": "npm-template-check", - "template-copy": "npm-template-copy --force", - "snap": "tap" - }, - "dependencies": { - "hosted-git-info": "^5.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "tap": "^15.0.9" - }, - "files": [ - "bin", - "lib" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - }, - "templateOSS": { - "version": "2.9.2" - }, - "tap": { - "branches": 86, - "functions": 92, - "lines": 86, - "statements": 86 - } -} diff --git a/package-lock.json b/package-lock.json index 7ac0c36039878..a1def5dfb5ebc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5438,17 +5438,39 @@ } }, "node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", + "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", + "inBundle": true, "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" }, "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/normalize-package-data/node_modules/hosted-git-info": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "inBundle": true, + "dependencies": { + "lru-cache": "^7.5.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/normalize-package-data/node_modules/lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", + "inBundle": true, + "engines": { + "node": ">=12" } }, "node_modules/normalize-path": { @@ -6481,42 +6503,6 @@ "node": ">=10" } }, - "node_modules/read-package-json/node_modules/hosted-git-info": { - "version": "5.0.0", - 
"resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "inBundle": true, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, - "node_modules/read-package-json/node_modules/lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/read-package-json/node_modules/normalize-package-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", - "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", - "inBundle": true, - "dependencies": { - "hosted-git-info": "^5.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, "node_modules/read-package-tree": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/read-package-tree/-/read-package-tree-5.3.1.tgz", @@ -10714,7 +10700,7 @@ "version": "6.0.1", "license": "ISC", "dependencies": { - "normalize-package-data": "^3.0.2", + "normalize-package-data": "^4.0.0", "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "semver": "^7.1.3", @@ -14518,7 +14504,7 @@ "libnpmpack": "^4.0.0", "lodash.clonedeep": "^4.5.0", "nock": "^12.0.2", - "normalize-package-data": "^3.0.2", + "normalize-package-data": "4.0.0", "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "semver": "^7.1.3", @@ -15104,14 +15090,29 @@ } }, "normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", + "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", "requires": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" + "hosted-git-info": "^5.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "dependencies": { + "hosted-git-info": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", + "requires": { + "lru-cache": "^7.5.1" + } + }, + "lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" + } } }, "normalize-path": { @@ -15882,32 +15883,6 @@ "json-parse-even-better-errors": "^2.3.1", "normalize-package-data": "^4.0.0", "npm-normalize-package-bin": "^1.0.1" - }, - "dependencies": { - "hosted-git-info": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": 
"sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "requires": { - "lru-cache": "^7.5.1" - } - }, - "lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" - }, - "normalize-package-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-4.0.0.tgz", - "integrity": "sha512-m+GL22VXJKkKbw62ZaBBjv8u6IE3UI4Mh5QakIqs3fWiKe0Xyi6L97hakwZK41/LD4R/2ly71Bayx0NLMwLA/g==", - "requires": { - "hosted-git-info": "^5.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - } - } } }, "read-package-json-fast": { diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json index 50b470e134b29..59f584571617b 100644 --- a/workspaces/libnpmpublish/package.json +++ b/workspaces/libnpmpublish/package.json @@ -44,7 +44,7 @@ "bugs": "https://github.com/npm/cli/issues", "homepage": "https://npmjs.com/package/libnpmpublish", "dependencies": { - "normalize-package-data": "^3.0.2", + "normalize-package-data": "^4.0.0", "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "semver": "^7.1.3", From 264901b4dc006dbe398ae2e4ec82d37cffd1750b Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 15 Mar 2022 12:54:08 -0700 Subject: [PATCH 10/11] deps: hosted-git-info@5.0.0 --- node_modules/hosted-git-info/git-host-info.js | 184 ------ node_modules/hosted-git-info/index.js | 237 ------- .../hosted-git-info/lib/git-host-info.js | 0 .../hosted-git-info/{ => lib}/git-host.js | 0 .../hosted-git-info/lib/index.js | 0 .../node_modules/lru-cache/LICENSE | 0 .../node_modules/lru-cache/index.js | 0 .../node_modules/lru-cache/package.json | 0 node_modules/hosted-git-info/package.json | 29 +- .../node_modules/hosted-git-info/LICENSE | 13 - .../hosted-git-info/lib/git-host.js | 110 ---- .../node_modules/hosted-git-info/package.json | 56 -- .../node_modules/hosted-git-info/LICENSE | 13 - .../hosted-git-info/lib/git-host-info.js | 185 ------ .../hosted-git-info/lib/git-host.js | 110 ---- .../node_modules/hosted-git-info/lib/index.js | 244 ------- .../node_modules/hosted-git-info/package.json | 56 -- .../node_modules/lru-cache/LICENSE | 15 - .../node_modules/lru-cache/index.js | 615 ------------------ .../node_modules/lru-cache/package.json | 43 -- package-lock.json | 110 +--- package.json | 2 +- 22 files changed, 45 insertions(+), 1977 deletions(-) delete mode 100644 node_modules/hosted-git-info/git-host-info.js delete mode 100644 node_modules/hosted-git-info/index.js rename node_modules/{normalize-package-data/node_modules => }/hosted-git-info/lib/git-host-info.js (100%) rename node_modules/hosted-git-info/{ => lib}/git-host.js (100%) rename node_modules/{normalize-package-data/node_modules => }/hosted-git-info/lib/index.js (100%) rename node_modules/{normalize-package-data => hosted-git-info}/node_modules/lru-cache/LICENSE (100%) rename node_modules/{normalize-package-data => hosted-git-info}/node_modules/lru-cache/index.js (100%) rename node_modules/{normalize-package-data => hosted-git-info}/node_modules/lru-cache/package.json (100%) delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host.js delete mode 100644 
node_modules/normalize-package-data/node_modules/hosted-git-info/package.json delete mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE delete mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js delete mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js delete mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js delete mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/package.json delete mode 100644 node_modules/npm-package-arg/node_modules/lru-cache/LICENSE delete mode 100644 node_modules/npm-package-arg/node_modules/lru-cache/index.js delete mode 100644 node_modules/npm-package-arg/node_modules/lru-cache/package.json diff --git a/node_modules/hosted-git-info/git-host-info.js b/node_modules/hosted-git-info/git-host-info.js deleted file mode 100644 index ba55248e7d62d..0000000000000 --- a/node_modules/hosted-git-info/git-host-info.js +++ /dev/null @@ -1,184 +0,0 @@ -'use strict' -const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' -const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : '' - -const defaults = { - sshtemplate: ({ domain, user, project, committish }) => `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, - sshurltemplate: ({ domain, user, project, committish }) => `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - browsetemplate: ({ domain, user, project, committish, treepath }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, - browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'master')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, - docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, - httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/raw/${maybeEncode(committish) || 'master'}/${path}`, - shortcuttemplate: ({ type, user, project, committish }) => `${type}:${user}/${project}${maybeJoin('#', committish)}`, - pathtemplate: ({ user, project, committish }) => `${user}/${project}${maybeJoin('#', committish)}`, - bugstemplate: ({ domain, user, project }) => `https://${domain}/${user}/${project}/issues`, - hashformat: formatHashFragment -} - -const gitHosts = {} -gitHosts.github = Object.assign({}, defaults, { - // First two are insecure and generally shouldn't be used any more, but - // they are still supported. 
- protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'github.com', - treepath: 'tree', - filetemplate: ({ auth, user, project, committish, path }) => `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish) || 'master'}/${path}`, - gittemplate: ({ auth, domain, user, project, committish }) => `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, - extract: (url) => { - let [, user, project, type, committish] = url.pathname.split('/', 5) - if (type && type !== 'tree') { - return - } - - if (!type) { - committish = url.hash.slice(1) - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish } - } -}) - -gitHosts.bitbucket = Object.assign({}, defaults, { - protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'bitbucket.org', - treepath: 'src', - tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/get/${maybeEncode(committish) || 'master'}.tar.gz`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - if (['get'].includes(aux)) { - return - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - } -}) - -gitHosts.gitlab = Object.assign({}, defaults, { - protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'gitlab.com', - treepath: 'tree', - httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish) || 'master'}`, - extract: (url) => { - const path = url.pathname.slice(1) - if (path.includes('/-/') || path.includes('/archive.tar.gz')) { - return - } - - const segments = path.split('/') - let project = segments.pop() - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - const user = segments.join('/') - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - } -}) - -gitHosts.gist = Object.assign({}, defaults, { - protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'gist.github.com', - sshtemplate: ({ domain, project, committish }) => `git@${domain}:${project}.git${maybeJoin('#', committish)}`, - sshurltemplate: ({ domain, project, committish }) => `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, - browsetemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, - browsefiletemplate: ({ domain, project, committish, path, hashformat }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, - docstemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, - httpstemplate: ({ domain, project, committish }) => `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, - filetemplate: ({ user, project, committish, path }) => 
`https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, - shortcuttemplate: ({ type, project, committish }) => `${type}:${project}${maybeJoin('#', committish)}`, - pathtemplate: ({ project, committish }) => `${project}${maybeJoin('#', committish)}`, - bugstemplate: ({ domain, project }) => `https://${domain}/${project}`, - gittemplate: ({ domain, project, committish }) => `git://${domain}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ project, committish }) => `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - if (aux === 'raw') { - return - } - - if (!project) { - if (!user) { - return - } - - project = user - user = null - } - - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - return { user, project, committish: url.hash.slice(1) } - }, - hashformat: function (fragment) { - return fragment && 'file-' + formatHashFragment(fragment) - } -}) - -gitHosts.sourcehut = Object.assign({}, defaults, { - protocols: ['git+ssh:', 'https:'], - domain: 'git.sr.ht', - treepath: 'tree', - browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'main')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, - filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'main'}/${path}`, - httpstemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'main'}.tar.gz`, - bugstemplate: ({ domain, user, project }) => `https://todo.sr.ht/${user}/${project}`, - docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - - // tarball url - if (['archive'].includes(aux)) { - return - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - } -}) - -const names = Object.keys(gitHosts) -gitHosts.byShortcut = {} -gitHosts.byDomain = {} -for (const name of names) { - gitHosts.byShortcut[`${name}:`] = name - gitHosts.byDomain[gitHosts[name].domain] = name -} - -function formatHashFragment (fragment) { - return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') -} - -module.exports = gitHosts diff --git a/node_modules/hosted-git-info/index.js b/node_modules/hosted-git-info/index.js deleted file mode 100644 index f35c570c46b59..0000000000000 --- a/node_modules/hosted-git-info/index.js +++ /dev/null @@ -1,237 +0,0 @@ -'use strict' -const url = require('url') -const gitHosts = require('./git-host-info.js') -const GitHost = module.exports = require('./git-host.js') -const LRU = require('lru-cache') -const cache = new LRU({ max: 1000 }) - -const protocolToRepresentationMap = { - 'git+ssh:': 'sshurl', - 'git+https:': 'https', - 'ssh:': 'sshurl', - 'git:': 'git' -} - -function protocolToRepresentation (protocol) { - return protocolToRepresentationMap[protocol] || 
protocol.slice(0, -1) -} - -const authProtocols = { - 'git:': true, - 'https:': true, - 'git+https:': true, - 'http:': true, - 'git+http:': true -} - -const knownProtocols = Object.keys(gitHosts.byShortcut).concat(['http:', 'https:', 'git:', 'git+ssh:', 'git+https:', 'ssh:']) - -module.exports.fromUrl = function (giturl, opts) { - if (typeof giturl !== 'string') { - return - } - - const key = giturl + JSON.stringify(opts || {}) - - if (!cache.has(key)) { - cache.set(key, fromUrl(giturl, opts)) - } - - return cache.get(key) -} - -function fromUrl (giturl, opts) { - if (!giturl) { - return - } - - const url = isGitHubShorthand(giturl) ? 'github:' + giturl : correctProtocol(giturl) - const parsed = parseGitUrl(url) - if (!parsed) { - return parsed - } - - const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] - const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') ? parsed.hostname.slice(4) : parsed.hostname] - const gitHostName = gitHostShortcut || gitHostDomain - if (!gitHostName) { - return - } - - const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] - let auth = null - if (authProtocols[parsed.protocol] && (parsed.username || parsed.password)) { - auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` - } - - let committish = null - let user = null - let project = null - let defaultRepresentation = null - - try { - if (gitHostShortcut) { - let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname - const firstAt = pathname.indexOf('@') - // we ignore auth for shortcuts, so just trim it out - if (firstAt > -1) { - pathname = pathname.slice(firstAt + 1) - } - - const lastSlash = pathname.lastIndexOf('/') - if (lastSlash > -1) { - user = decodeURIComponent(pathname.slice(0, lastSlash)) - // we want nulls only, never empty strings - if (!user) { - user = null - } - project = decodeURIComponent(pathname.slice(lastSlash + 1)) - } else { - project = decodeURIComponent(pathname) - } - - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (parsed.hash) { - committish = decodeURIComponent(parsed.hash.slice(1)) - } - - defaultRepresentation = 'shortcut' - } else { - if (!gitHostInfo.protocols.includes(parsed.protocol)) { - return - } - - const segments = gitHostInfo.extract(parsed) - if (!segments) { - return - } - - user = segments.user && decodeURIComponent(segments.user) - project = decodeURIComponent(segments.project) - committish = decodeURIComponent(segments.committish) - defaultRepresentation = protocolToRepresentation(parsed.protocol) - } - } catch (err) { - /* istanbul ignore else */ - if (err instanceof URIError) { - return - } else { - throw err - } - } - - return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) -} - -// accepts input like git:github.com:user/repo and inserts the // after the first : -const correctProtocol = (arg) => { - const firstColon = arg.indexOf(':') - const proto = arg.slice(0, firstColon + 1) - if (knownProtocols.includes(proto)) { - return arg - } - - const firstAt = arg.indexOf('@') - if (firstAt > -1) { - if (firstAt > firstColon) { - return `git+ssh://${arg}` - } else { - return arg - } - } - - const doubleSlash = arg.indexOf('//') - if (doubleSlash === firstColon + 1) { - return arg - } - - return arg.slice(0, firstColon + 1) + '//' + arg.slice(firstColon + 1) -} - -// look for github shorthand inputs, such as npm/cli -const isGitHubShorthand = (arg) => { - // it cannot contain whitespace before the first # - 
// it cannot start with a / because that's probably an absolute file path - // but it must include a slash since repos are username/repository - // it cannot start with a . because that's probably a relative file path - // it cannot start with an @ because that's a scoped package if it passes the other tests - // it cannot contain a : before a # because that tells us that there's a protocol - // a second / may not exist before a # - const firstHash = arg.indexOf('#') - const firstSlash = arg.indexOf('/') - const secondSlash = arg.indexOf('/', firstSlash + 1) - const firstColon = arg.indexOf(':') - const firstSpace = /\s/.exec(arg) - const firstAt = arg.indexOf('@') - - const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) - const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) - const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) - const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) - const hasSlash = firstSlash > 0 - // if a # is found, what we really want to know is that the character immediately before # is not a / - const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') - const doesNotStartWithDot = !arg.startsWith('.') - - return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && secondSlashOnlyAfterHash -} - -// attempt to correct an scp style url so that it will parse with `new URL()` -const correctUrl = (giturl) => { - const firstAt = giturl.indexOf('@') - const lastHash = giturl.lastIndexOf('#') - let firstColon = giturl.indexOf(':') - let lastColon = giturl.lastIndexOf(':', lastHash > -1 ? 
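Taken together, these checks accept exactly one user/project pair with an optional #committish. A few concrete cases, assuming the isGitHubShorthand defined above:

isGitHubShorthand('npm/cli')                // true  -> treated as 'github:npm/cli'
isGitHubShorthand('npm/cli#v8.5.0')         // true  -> committish preserved
isGitHubShorthand('./npm/cli')              // false -> looks like a relative path
isGitHubShorthand('npm/cli/docs')           // false -> second '/' before any '#'
isGitHubShorthand('git@github.com:npm/cli') // false -> ':' implies a protocol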
lastHash : Infinity) - - let corrected - if (lastColon > firstAt) { - // the last : comes after the first @ (or there is no @) - // like it would in: - // proto://hostname.com:user/repo - // username@hostname.com:user/repo - // :password@hostname.com:user/repo - // username:password@hostname.com:user/repo - // proto://username@hostname.com:user/repo - // proto://:password@hostname.com:user/repo - // proto://username:password@hostname.com:user/repo - // then we replace the last : with a / to create a valid path - corrected = giturl.slice(0, lastColon) + '/' + giturl.slice(lastColon + 1) - // // and we find our new : positions - firstColon = corrected.indexOf(':') - lastColon = corrected.lastIndexOf(':') - } - - if (firstColon === -1 && giturl.indexOf('//') === -1) { - // we have no : at all - // as it would be in: - // username@hostname.com/user/repo - // then we prepend a protocol - corrected = `git+ssh://${corrected}` - } - - return corrected -} - -// try to parse the url as its given to us, if that throws -// then we try to clean the url and parse that result instead -// THIS FUNCTION SHOULD NEVER THROW -const parseGitUrl = (giturl) => { - let result - try { - result = new url.URL(giturl) - } catch (err) {} - - if (result) { - return result - } - - const correctedUrl = correctUrl(giturl) - try { - result = new url.URL(correctedUrl) - } catch (err) {} - - return result -} diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host-info.js b/node_modules/hosted-git-info/lib/git-host-info.js similarity index 100% rename from node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host-info.js rename to node_modules/hosted-git-info/lib/git-host-info.js diff --git a/node_modules/hosted-git-info/git-host.js b/node_modules/hosted-git-info/lib/git-host.js similarity index 100% rename from node_modules/hosted-git-info/git-host.js rename to node_modules/hosted-git-info/lib/git-host.js diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js b/node_modules/hosted-git-info/lib/index.js similarity index 100% rename from node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js rename to node_modules/hosted-git-info/lib/index.js diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/LICENSE b/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/normalize-package-data/node_modules/lru-cache/LICENSE rename to node_modules/hosted-git-info/node_modules/lru-cache/LICENSE diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/index.js similarity index 100% rename from node_modules/normalize-package-data/node_modules/lru-cache/index.js rename to node_modules/hosted-git-info/node_modules/lru-cache/index.js diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/package.json similarity index 100% rename from node_modules/normalize-package-data/node_modules/lru-cache/package.json rename to node_modules/hosted-git-info/node_modules/lru-cache/package.json diff --git a/node_modules/hosted-git-info/package.json b/node_modules/hosted-git-info/package.json index b145e62240805..0153b0852cbf4 100644 --- a/node_modules/hosted-git-info/package.json +++ b/node_modules/hosted-git-info/package.json @@ -1,8 +1,8 @@ { "name": "hosted-git-info", - "version": "4.1.0", + "version": "5.0.0", 
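The net effect of correctProtocol, correctUrl, and parseGitUrl together is that scp-style git addresses survive the WHATWG parser. A worked example of the rewrite steps for one such input, following the code above:

// new URL('git@github.com:npm/cli') throws, so correctUrl() rewrites it:
//   1. the last ':' comes after the '@', so it becomes '/'
//        -> 'git@github.com/npm/cli'
//   2. no ':' remains and there is no '//', so a protocol is prepended
//        -> 'git+ssh://git@github.com/npm/cli'
new URL('git+ssh://git@github.com/npm/cli').pathname // -> '/npm/cli'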
"description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", - "main": "index.js", + "main": "./lib/index.js", "repository": { "type": "git", "url": "git+https://github.com/npm/hosted-git-info.git" @@ -13,39 +13,44 @@ "bitbucket", "gitlab" ], - "author": "Rebecca Turner (http://re-becca.org)", + "author": "GitHub Inc.", "license": "ISC", "bugs": { "url": "https://github.com/npm/hosted-git-info/issues" }, "homepage": "https://github.com/npm/hosted-git-info", "scripts": { - "posttest": "standard", + "posttest": "npm run lint", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "preversion": "npm test", "snap": "tap", "test": "tap", - "test:coverage": "tap --coverage-report=html" + "test:coverage": "tap --coverage-report=html", + "lint": "eslint '**/*.js'", + "postlint": "npm-template-check", + "template-copy": "npm-template-copy --force", + "lintfix": "npm run lint -- --fix" }, "dependencies": { - "lru-cache": "^6.0.0" + "lru-cache": "^7.5.1" }, "devDependencies": { - "standard": "^16.0.3", - "standard-version": "^9.1.0", + "@npmcli/template-oss": "^2.9.2", "tap": "^15.1.6" }, "files": [ - "index.js", - "git-host.js", - "git-host-info.js" + "bin", + "lib" ], "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" }, "tap": { "color": 1, "coverage": true + }, + "templateOSS": { + "version": "2.9.2" } } diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE b/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE deleted file mode 100644 index 45055763dc838..0000000000000 --- a/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2015, Rebecca Turner - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host.js deleted file mode 100644 index 8a975e92e58bb..0000000000000 --- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/git-host.js +++ /dev/null @@ -1,110 +0,0 @@ -'use strict' -const gitHosts = require('./git-host-info.js') - -class GitHost { - constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { - Object.assign(this, gitHosts[type]) - this.type = type - this.user = user - this.auth = auth - this.project = project - this.committish = committish - this.default = defaultRepresentation - this.opts = opts - } - - hash () { - return this.committish ? 
`#${this.committish}` : '' - } - - ssh (opts) { - return this._fill(this.sshtemplate, opts) - } - - _fill (template, opts) { - if (typeof template === 'function') { - const options = { ...this, ...this.opts, ...opts } - - // the path should always be set so we don't end up with 'undefined' in urls - if (!options.path) { - options.path = '' - } - - // template functions will insert the leading slash themselves - if (options.path.startsWith('/')) { - options.path = options.path.slice(1) - } - - if (options.noCommittish) { - options.committish = null - } - - const result = template(options) - return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result - } - - return null - } - - sshurl (opts) { - return this._fill(this.sshurltemplate, opts) - } - - browse (path, fragment, opts) { - // not a string, treat path as opts - if (typeof path !== 'string') { - return this._fill(this.browsetemplate, path) - } - - if (typeof fragment !== 'string') { - opts = fragment - fragment = null - } - return this._fill(this.browsefiletemplate, { ...opts, fragment, path }) - } - - docs (opts) { - return this._fill(this.docstemplate, opts) - } - - bugs (opts) { - return this._fill(this.bugstemplate, opts) - } - - https (opts) { - return this._fill(this.httpstemplate, opts) - } - - git (opts) { - return this._fill(this.gittemplate, opts) - } - - shortcut (opts) { - return this._fill(this.shortcuttemplate, opts) - } - - path (opts) { - return this._fill(this.pathtemplate, opts) - } - - tarball (opts) { - return this._fill(this.tarballtemplate, { ...opts, noCommittish: false }) - } - - file (path, opts) { - return this._fill(this.filetemplate, { ...opts, path }) - } - - getDefaultRepresentation () { - return this.default - } - - toString (opts) { - if (this.default && typeof this[this.default] === 'function') { - return this[this.default](opts) - } - - return this.sshurl(opts) - } -} -module.exports = GitHost diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json deleted file mode 100644 index 0153b0852cbf4..0000000000000 --- a/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "hosted-git-info", - "version": "5.0.0", - "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", - "main": "./lib/index.js", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/hosted-git-info.git" - }, - "keywords": [ - "git", - "github", - "bitbucket", - "gitlab" - ], - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/hosted-git-info/issues" - }, - "homepage": "https://github.com/npm/hosted-git-info", - "scripts": { - "posttest": "npm run lint", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preversion": "npm test", - "snap": "tap", - "test": "tap", - "test:coverage": "tap --coverage-report=html", - "lint": "eslint '**/*.js'", - "postlint": "npm-template-check", - "template-copy": "npm-template-copy --force", - "lintfix": "npm run lint -- --fix" - }, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "tap": "^15.1.6" - }, - "files": [ - "bin", - "lib" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - }, - "tap": { - "color": 1, - "coverage": true - }, - "templateOSS": { - "version": "2.9.2" - } -} diff --git 
a/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE b/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE deleted file mode 100644 index 45055763dc838..0000000000000 --- a/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2015, Rebecca Turner - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js deleted file mode 100644 index 9a9720fa3c339..0000000000000 --- a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host-info.js +++ /dev/null @@ -1,185 +0,0 @@ -/* eslint-disable max-len */ -'use strict' -const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' -const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : '' - -const defaults = { - sshtemplate: ({ domain, user, project, committish }) => `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, - sshurltemplate: ({ domain, user, project, committish }) => `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - browsetemplate: ({ domain, user, project, committish, treepath }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, - browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'master')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, - docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, - httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/raw/${maybeEncode(committish) || 'master'}/${path}`, - shortcuttemplate: ({ type, user, project, committish }) => `${type}:${user}/${project}${maybeJoin('#', committish)}`, - pathtemplate: ({ user, project, committish }) => `${user}/${project}${maybeJoin('#', committish)}`, - bugstemplate: ({ domain, user, project }) => `https://${domain}/${user}/${project}/issues`, - hashformat: formatHashFragment, -} - -const gitHosts = {} -gitHosts.github = Object.assign({}, defaults, { - // First two are insecure and generally shouldn't be used any more, but - // they are still supported. 
- protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'github.com', - treepath: 'tree', - filetemplate: ({ auth, user, project, committish, path }) => `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish) || 'master'}/${path}`, - gittemplate: ({ auth, domain, user, project, committish }) => `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, - extract: (url) => { - let [, user, project, type, committish] = url.pathname.split('/', 5) - if (type && type !== 'tree') { - return - } - - if (!type) { - committish = url.hash.slice(1) - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish } - }, -}) - -gitHosts.bitbucket = Object.assign({}, defaults, { - protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'bitbucket.org', - treepath: 'src', - tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/get/${maybeEncode(committish) || 'master'}.tar.gz`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - if (['get'].includes(aux)) { - return - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - }, -}) - -gitHosts.gitlab = Object.assign({}, defaults, { - protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'gitlab.com', - treepath: 'tree', - httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish) || 'master'}`, - extract: (url) => { - const path = url.pathname.slice(1) - if (path.includes('/-/') || path.includes('/archive.tar.gz')) { - return - } - - const segments = path.split('/') - let project = segments.pop() - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - const user = segments.join('/') - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - }, -}) - -gitHosts.gist = Object.assign({}, defaults, { - protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'gist.github.com', - sshtemplate: ({ domain, project, committish }) => `git@${domain}:${project}.git${maybeJoin('#', committish)}`, - sshurltemplate: ({ domain, project, committish }) => `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, - browsetemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, - browsefiletemplate: ({ domain, project, committish, path, hashformat }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, - docstemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, - httpstemplate: ({ domain, project, committish }) => `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, - filetemplate: ({ user, project, committish, path }) => 
`https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, - shortcuttemplate: ({ type, project, committish }) => `${type}:${project}${maybeJoin('#', committish)}`, - pathtemplate: ({ project, committish }) => `${project}${maybeJoin('#', committish)}`, - bugstemplate: ({ domain, project }) => `https://${domain}/${project}`, - gittemplate: ({ domain, project, committish }) => `git://${domain}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ project, committish }) => `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - if (aux === 'raw') { - return - } - - if (!project) { - if (!user) { - return - } - - project = user - user = null - } - - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - return { user, project, committish: url.hash.slice(1) } - }, - hashformat: function (fragment) { - return fragment && 'file-' + formatHashFragment(fragment) - }, -}) - -gitHosts.sourcehut = Object.assign({}, defaults, { - protocols: ['git+ssh:', 'https:'], - domain: 'git.sr.ht', - treepath: 'tree', - browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'main')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, - filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'main'}/${path}`, - httpstemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'main'}.tar.gz`, - bugstemplate: ({ domain, user, project }) => `https://todo.sr.ht/${user}/${project}`, - docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - - // tarball url - if (['archive'].includes(aux)) { - return - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - }, -}) - -const names = Object.keys(gitHosts) -gitHosts.byShortcut = {} -gitHosts.byDomain = {} -for (const name of names) { - gitHosts.byShortcut[`${name}:`] = name - gitHosts.byDomain[gitHosts[name].domain] = name -} - -function formatHashFragment (fragment) { - return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') -} - -module.exports = gitHosts diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js deleted file mode 100644 index 8a975e92e58bb..0000000000000 --- a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/git-host.js +++ /dev/null @@ -1,110 +0,0 @@ -'use strict' -const gitHosts = require('./git-host-info.js') - -class GitHost { - constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { - Object.assign(this, gitHosts[type]) - this.type = type - this.user = user - this.auth = auth - this.project = project - this.committish = committish - this.default = 
defaultRepresentation - this.opts = opts - } - - hash () { - return this.committish ? `#${this.committish}` : '' - } - - ssh (opts) { - return this._fill(this.sshtemplate, opts) - } - - _fill (template, opts) { - if (typeof template === 'function') { - const options = { ...this, ...this.opts, ...opts } - - // the path should always be set so we don't end up with 'undefined' in urls - if (!options.path) { - options.path = '' - } - - // template functions will insert the leading slash themselves - if (options.path.startsWith('/')) { - options.path = options.path.slice(1) - } - - if (options.noCommittish) { - options.committish = null - } - - const result = template(options) - return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result - } - - return null - } - - sshurl (opts) { - return this._fill(this.sshurltemplate, opts) - } - - browse (path, fragment, opts) { - // not a string, treat path as opts - if (typeof path !== 'string') { - return this._fill(this.browsetemplate, path) - } - - if (typeof fragment !== 'string') { - opts = fragment - fragment = null - } - return this._fill(this.browsefiletemplate, { ...opts, fragment, path }) - } - - docs (opts) { - return this._fill(this.docstemplate, opts) - } - - bugs (opts) { - return this._fill(this.bugstemplate, opts) - } - - https (opts) { - return this._fill(this.httpstemplate, opts) - } - - git (opts) { - return this._fill(this.gittemplate, opts) - } - - shortcut (opts) { - return this._fill(this.shortcuttemplate, opts) - } - - path (opts) { - return this._fill(this.pathtemplate, opts) - } - - tarball (opts) { - return this._fill(this.tarballtemplate, { ...opts, noCommittish: false }) - } - - file (path, opts) { - return this._fill(this.filetemplate, { ...opts, path }) - } - - getDefaultRepresentation () { - return this.default - } - - toString (opts) { - if (this.default && typeof this[this.default] === 'function') { - return this[this.default](opts) - } - - return this.sshurl(opts) - } -} -module.exports = GitHost diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js deleted file mode 100644 index 8bce6b3c28d51..0000000000000 --- a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js +++ /dev/null @@ -1,244 +0,0 @@ -'use strict' -const url = require('url') -const gitHosts = require('./git-host-info.js') -const GitHost = module.exports = require('./git-host.js') -const LRU = require('lru-cache') -const cache = new LRU({ max: 1000 }) - -const protocolToRepresentationMap = { - 'git+ssh:': 'sshurl', - 'git+https:': 'https', - 'ssh:': 'sshurl', - 'git:': 'git', -} - -function protocolToRepresentation (protocol) { - return protocolToRepresentationMap[protocol] || protocol.slice(0, -1) -} - -const authProtocols = { - 'git:': true, - 'https:': true, - 'git+https:': true, - 'http:': true, - 'git+http:': true, -} - -const knownProtocols = Object.keys(gitHosts.byShortcut) - .concat(['http:', 'https:', 'git:', 'git+ssh:', 'git+https:', 'ssh:']) - -module.exports.fromUrl = function (giturl, opts) { - if (typeof giturl !== 'string') { - return - } - - const key = giturl + JSON.stringify(opts || {}) - - if (!cache.has(key)) { - cache.set(key, fromUrl(giturl, opts)) - } - - return cache.get(key) -} - -function fromUrl (giturl, opts) { - if (!giturl) { - return - } - - const url = isGitHubShorthand(giturl) ? 
'github:' + giturl : correctProtocol(giturl) - const parsed = parseGitUrl(url) - if (!parsed) { - return parsed - } - - const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] - const gitHostDomain = - gitHosts.byDomain[parsed.hostname.startsWith('www.') ? - parsed.hostname.slice(4) : - parsed.hostname] - const gitHostName = gitHostShortcut || gitHostDomain - if (!gitHostName) { - return - } - - const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] - let auth = null - if (authProtocols[parsed.protocol] && (parsed.username || parsed.password)) { - auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` - } - - let committish = null - let user = null - let project = null - let defaultRepresentation = null - - try { - if (gitHostShortcut) { - let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname - const firstAt = pathname.indexOf('@') - // we ignore auth for shortcuts, so just trim it out - if (firstAt > -1) { - pathname = pathname.slice(firstAt + 1) - } - - const lastSlash = pathname.lastIndexOf('/') - if (lastSlash > -1) { - user = decodeURIComponent(pathname.slice(0, lastSlash)) - // we want nulls only, never empty strings - if (!user) { - user = null - } - project = decodeURIComponent(pathname.slice(lastSlash + 1)) - } else { - project = decodeURIComponent(pathname) - } - - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (parsed.hash) { - committish = decodeURIComponent(parsed.hash.slice(1)) - } - - defaultRepresentation = 'shortcut' - } else { - if (!gitHostInfo.protocols.includes(parsed.protocol)) { - return - } - - const segments = gitHostInfo.extract(parsed) - if (!segments) { - return - } - - user = segments.user && decodeURIComponent(segments.user) - project = decodeURIComponent(segments.project) - committish = decodeURIComponent(segments.committish) - defaultRepresentation = protocolToRepresentation(parsed.protocol) - } - } catch (err) { - /* istanbul ignore else */ - if (err instanceof URIError) { - return - } else { - throw err - } - } - - return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) -} - -// accepts input like git:github.com:user/repo and inserts the // after the first : -const correctProtocol = (arg) => { - const firstColon = arg.indexOf(':') - const proto = arg.slice(0, firstColon + 1) - if (knownProtocols.includes(proto)) { - return arg - } - - const firstAt = arg.indexOf('@') - if (firstAt > -1) { - if (firstAt > firstColon) { - return `git+ssh://${arg}` - } else { - return arg - } - } - - const doubleSlash = arg.indexOf('//') - if (doubleSlash === firstColon + 1) { - return arg - } - - return arg.slice(0, firstColon + 1) + '//' + arg.slice(firstColon + 1) -} - -// look for github shorthand inputs, such as npm/cli -const isGitHubShorthand = (arg) => { - // it cannot contain whitespace before the first # - // it cannot start with a / because that's probably an absolute file path - // but it must include a slash since repos are username/repository - // it cannot start with a . 
because that's probably a relative file path - // it cannot start with an @ because that's a scoped package if it passes the other tests - // it cannot contain a : before a # because that tells us that there's a protocol - // a second / may not exist before a # - const firstHash = arg.indexOf('#') - const firstSlash = arg.indexOf('/') - const secondSlash = arg.indexOf('/', firstSlash + 1) - const firstColon = arg.indexOf(':') - const firstSpace = /\s/.exec(arg) - const firstAt = arg.indexOf('@') - - const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) - const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) - const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) - const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) - const hasSlash = firstSlash > 0 - // if a # is found, what we really want to know is that the character - // immediately before # is not a / - const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') - const doesNotStartWithDot = !arg.startsWith('.') - - return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && - doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && - secondSlashOnlyAfterHash -} - -// attempt to correct an scp style url so that it will parse with `new URL()` -const correctUrl = (giturl) => { - const firstAt = giturl.indexOf('@') - const lastHash = giturl.lastIndexOf('#') - let firstColon = giturl.indexOf(':') - let lastColon = giturl.lastIndexOf(':', lastHash > -1 ? lastHash : Infinity) - - let corrected - if (lastColon > firstAt) { - // the last : comes after the first @ (or there is no @) - // like it would in: - // proto://hostname.com:user/repo - // username@hostname.com:user/repo - // :password@hostname.com:user/repo - // username:password@hostname.com:user/repo - // proto://username@hostname.com:user/repo - // proto://:password@hostname.com:user/repo - // proto://username:password@hostname.com:user/repo - // then we replace the last : with a / to create a valid path - corrected = giturl.slice(0, lastColon) + '/' + giturl.slice(lastColon + 1) - // // and we find our new : positions - firstColon = corrected.indexOf(':') - lastColon = corrected.lastIndexOf(':') - } - - if (firstColon === -1 && giturl.indexOf('//') === -1) { - // we have no : at all - // as it would be in: - // username@hostname.com/user/repo - // then we prepend a protocol - corrected = `git+ssh://${corrected}` - } - - return corrected -} - -// try to parse the url as its given to us, if that throws -// then we try to clean the url and parse that result instead -// THIS FUNCTION SHOULD NEVER THROW -const parseGitUrl = (giturl) => { - let result - try { - result = new url.URL(giturl) - } catch (err) {} - - if (result) { - return result - } - - const correctedUrl = correctUrl(giturl) - try { - result = new url.URL(correctedUrl) - } catch (err) {} - - return result -} diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json b/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json deleted file mode 100644 index 0153b0852cbf4..0000000000000 --- a/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "hosted-git-info", - "version": "5.0.0", - "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", - "main": "./lib/index.js", - "repository": { - 
"type": "git", - "url": "git+https://github.com/npm/hosted-git-info.git" - }, - "keywords": [ - "git", - "github", - "bitbucket", - "gitlab" - ], - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/hosted-git-info/issues" - }, - "homepage": "https://github.com/npm/hosted-git-info", - "scripts": { - "posttest": "npm run lint", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preversion": "npm test", - "snap": "tap", - "test": "tap", - "test:coverage": "tap --coverage-report=html", - "lint": "eslint '**/*.js'", - "postlint": "npm-template-check", - "template-copy": "npm-template-copy --force", - "lintfix": "npm run lint -- --fix" - }, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "tap": "^15.1.6" - }, - "files": [ - "bin", - "lib" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - }, - "tap": { - "color": 1, - "coverage": true - }, - "templateOSS": { - "version": "2.9.2" - } -} diff --git a/node_modules/npm-package-arg/node_modules/lru-cache/LICENSE b/node_modules/npm-package-arg/node_modules/lru-cache/LICENSE deleted file mode 100644 index 9b58a3e03d1df..0000000000000 --- a/node_modules/npm-package-arg/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-package-arg/node_modules/lru-cache/index.js b/node_modules/npm-package-arg/node_modules/lru-cache/index.js deleted file mode 100644 index e37f51616452e..0000000000000 --- a/node_modules/npm-package-arg/node_modules/lru-cache/index.js +++ /dev/null @@ -1,615 +0,0 @@ -const perf = typeof performance === 'object' && performance && - typeof performance.now === 'function' ? 
performance : Date - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} -const shouldWarn = (code) => typeof process === 'object' && - process && - !(process.noDeprecation || warned.has(code)) -const warn = (code, what, instead, fn) => { - warned.add(code) - process.emitWarning(`The ${what} is deprecated. Please use ${instead} instead.`, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => !isPosInt(max) ? null -: max <= Math.pow(2, 8) ? Uint8Array -: max <= Math.pow(2, 16) ? Uint16Array -: max <= Math.pow(2, 32) ? Uint32Array -: max <= Number.MAX_SAFE_INTEGER ? ZeroArray -: null - -class ZeroArray extends Array { - constructor (size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor (max) { - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push (n) { - this.heap[this.length++] = n - } - pop () { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor (options = {}) { - const { - max, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize, - sizeCalculation, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { - length, - maxAge, - stale, - } = options instanceof LRUCache ? 
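getUintArray above picks the smallest typed array able to index max entries, which is what keeps the next/prev pointer lists compact. Assuming those definitions:

getUintArray(100)     // -> Uint8Array   (indexes fit in one byte)
getUintArray(5000)    // -> Uint16Array
getUintArray(1e9)     // -> Uint32Array
getUintArray(2 ** 40) // -> ZeroArray    (a plain zero-filled Array)
getUintArray(0)       // -> null         (not a positive integer)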
{} : options - - if (!isPosInt(max)) { - throw new TypeError('max option must be an integer') - } - - const UintArray = getUintArray(max) - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize || 0 - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize') - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculating set to non-function') - } - } - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - - if (this.maxSize) { - if (!isPosInt(this.maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified') - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.updateAgeOnGet = !!updateAgeOnGet - this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified') - } - this.initializeTTLTracking() - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - initializeTTLTracking () { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - this.setItemTTL = (index, ttl) => { - this.starts[index] = ttl !== 0 ? perf.now() : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - this.updateItemAge = (index) => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout(() => cachedNow = 0, this.ttlResolution) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - this.isStale = (index) => { - return this.ttls[index] !== 0 && this.starts[index] !== 0 && - ((cachedNow || getNow()) - this.starts[index] > this.ttls[index]) - } - } - updateItemAge (index) {} - setItemTTL (index, ttl) {} - isStale (index) { return false } - - initializeSizeTracking () { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => this.calculatedSize -= this.sizes[index] - this.addItemSize = (index, v, k, size, sizeCalculation) => { - const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0) - this.sizes[index] = isPosInt(s) ? 
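The TTL tracking above stores one start timestamp and one ttl per slot, and isStale() only consults the clock through cachedNow, which is refreshed at most once per ttlResolution ms. Stripped of that debouncing, the staleness test reduces to this sketch:

// stale when a TTL is set, a start time was recorded, and the clock has
// advanced past start + ttl
const isStale = (ttls, starts, index, now) =>
  ttls[index] !== 0 && starts[index] !== 0 && now - starts[index] > ttls[index]

isStale([1000], [5000], 0, 5500) // -> false (500ms into a 1000ms TTL)
isStale([1000], [5000], 0, 6500) // -> true  (1500ms elapsed > 1000ms TTL)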
s : 0 - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict() - } - this.calculatedSize += this.sizes[index] - } - this.delete = k => { - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - this.calculatedSize -= this.sizes[index] - } - } - return LRUCache.prototype.delete.call(this, k) - } - } - removeItemSize (index) {} - addItemSize (index, v, k, size, sizeCalculation) {} - - *indexes ({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail, j; true; ) { - if (!this.isValidIndex(i)) { - break - } - j = i === this.head - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes ({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head, j; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - // either the tail now, or WAS the tail, and deleted - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex (index) { - return this.keyMap.get(this.keyList[index]) === index - } - - *entries () { - for (const i of this.indexes()) { - yield [this.keyList[i], this.valList[i]] - } - } - *rentries () { - for (const i of this.rindexes()) { - yield [this.keyList[i], this.valList[i]] - } - } - - *keys () { - for (const i of this.indexes()) { - yield this.keyList[i] - } - } - *rkeys () { - for (const i of this.rindexes()) { - yield this.keyList[i] - } - } - - *values () { - for (const i of this.indexes()) { - yield this.valList[i] - } - } - *rvalues () { - for (const i of this.rindexes()) { - yield this.valList[i] - } - } - - [Symbol.iterator] () { - return this.entries() - } - - find (fn, getOptions = {}) { - for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach (fn, thisp = this) { - for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - rforEach (fn, thisp = this) { - for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - get prune () { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale () { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump () { - const arr = [] - for (const i of this.indexes()) { - const key = this.keyList[i] - const value = this.valList[i] - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load (arr) { - this.clear() - for (const [key, entry] of arr) { - this.set(key, entry.value, entry) - } - } - - dispose (v, k, reason) {} - - set (k, v, { - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - } = {}) { - let index = this.size === 0 ? 
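dump() above serializes entries oldest-first (each unshift pushes older entries to the front), so load() can replay them with set() and reproduce the same recency order, TTLs and sizes included. A round-trip sketch using this version of the package:

const LRUCache = require('lru-cache')

const a = new LRUCache({ max: 10 })
a.set('x', 1)
a.set('y', 2)

const b = new LRUCache({ max: 10 })
b.load(a.dump())
b.get('y') // -> 2, and 'y' is still the most recently used entry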
undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size ++ - this.addItemSize(index, v, k, size, sizeCalculation) - noUpdateTTL = false - } else { - // update - const oldVal = this.valList[index] - if (v !== oldVal) { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, v, k, size, sizeCalculation) - } - this.moveToTail(index) - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl) - } - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex () { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max) { - return this.evict() - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop () { - if (this.size) { - const val = this.valList[this.head] - this.evict() - return val - } - } - - evict () { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - this.removeItemSize(head) - this.head = this.next[head] - this.keyMap.delete(k) - this.size -- - return head - } - - has (k) { - return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) - } - - // like get(), but without any LRU updating or TTL expiration - peek (k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - return this.valList[index] - } - } - - get (k, { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (this.isStale(index)) { - const value = allowStale ? 
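newIndex() above hands out slots in order (free list first, then the initial fill), and evict() always reclaims the head, i.e. the least recently used entry, so capacity behaves like a classic LRU:

const cache = new LRUCache({ max: 2 })
cache.set('a', 1)
cache.set('b', 2)
cache.get('a')    // touch 'a' -> 'b' becomes the head (least recent)
cache.set('c', 3) // at capacity: evicts 'b' and reuses its slot
cache.has('b')    // -> false
cache.has('a')    // -> true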
this.valList[index] : undefined - this.delete(k) - return value - } else { - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return this.valList[index] - } - } - } - - connect (p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail (index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del () { - deprecatedMethod('del', 'delete') - return this.delete - } - delete (k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - this.dispose(this.valList[index], k, 'delete') - if (this.disposeAfter) { - this.disposed.push([this.valList[index], k, 'delete']) - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size -- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear () { - if (this.dispose !== LRUCache.prototype.dispose) { - for (const index of this.rindexes({ allowStale: true })) { - this.dispose(this.valList[index], this.keyList[index], 'delete') - } - } - if (this.disposeAfter) { - for (const index of this.rindexes({ allowStale: true })) { - this.disposed.push([this.valList[index], this.keyList[index], 'delete']) - } - } - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - get reset () { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length () { - deprecatedProperty('length', 'size') - return this.size - } -} - -module.exports = LRUCache diff --git a/node_modules/npm-package-arg/node_modules/lru-cache/package.json b/node_modules/npm-package-arg/node_modules/lru-cache/package.json deleted file mode 100644 index a62f74c2b648a..0000000000000 --- a/node_modules/npm-package-arg/node_modules/lru-cache/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "7.5.1", - "author": "Isaac Z. 
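moveToTail() above is the only list maintenance a cache hit needs: two connect() calls splice the index out of its current position and re-link it after the old tail, with no allocation. On plain arrays the same pointer surgery looks like:

// prev/next are the typed-array links; move index i to the tail position
const moveToTail = (s, i) => {
  if (i === s.tail) return
  if (i === s.head) {
    s.head = s.next[i]
  } else {                   // unlink i from the middle of the list
    s.next[s.prev[i]] = s.next[i]
    s.prev[s.next[i]] = s.prev[i]
  }
  s.prev[i] = s.tail         // re-link i after the old tail
  s.next[s.tail] = i
  s.tail = i
}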
Schlueter ", - "keywords": [ - "mru", - "lru", - "cache" - ], - "scripts": { - "build": "", - "test": "tap", - "snap": "tap", - "size": "size-limit", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "main": "index.js", - "repository": "git://github.com/isaacs/node-lru-cache.git", - "devDependencies": { - "@size-limit/preset-small-lib": "^7.0.8", - "benchmark": "^2.1.4", - "size-limit": "^7.0.8", - "tap": "^15.1.6" - }, - "license": "ISC", - "files": [ - "index.js" - ], - "engines": { - "node": ">=12" - }, - "tap": { - "coverage-map": "map.js" - }, - "size-limit": [ - { - "path": "./index.js" - } - ] -} diff --git a/package-lock.json b/package-lock.json index a1def5dfb5ebc..8fc2c375b0342 100644 --- a/package-lock.json +++ b/package-lock.json @@ -106,7 +106,7 @@ "fastest-levenshtein": "^1.0.12", "glob": "^7.2.0", "graceful-fs": "^4.2.9", - "hosted-git-info": "^4.1.0", + "hosted-git-info": "^5.0.0", "ini": "^2.0.0", "init-package-json": "^3.0.1", "is-cidr": "^4.0.2", @@ -3783,15 +3783,24 @@ } }, "node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", "inBundle": true, "dependencies": { - "lru-cache": "^6.0.0" + "lru-cache": "^7.5.1" }, "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" + } + }, + "node_modules/hosted-git-info/node_modules/lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", + "inBundle": true, + "engines": { + "node": ">=12" } }, "node_modules/html-encoding-sniffer": { @@ -5452,27 +5461,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/normalize-package-data/node_modules/hosted-git-info": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "inBundle": true, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - }, - "node_modules/normalize-package-data/node_modules/lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -5541,27 +5529,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/npm-package-arg/node_modules/hosted-git-info": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "inBundle": true, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" - } - 
}, - "node_modules/npm-package-arg/node_modules/lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, "node_modules/npm-packlist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-4.0.0.tgz", @@ -13633,11 +13600,18 @@ } }, "hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", + "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", "requires": { - "lru-cache": "^6.0.0" + "lru-cache": "^7.5.1" + }, + "dependencies": { + "lru-cache": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" + } } }, "html-encoding-sniffer": { @@ -14504,7 +14478,7 @@ "libnpmpack": "^4.0.0", "lodash.clonedeep": "^4.5.0", "nock": "^12.0.2", - "normalize-package-data": "4.0.0", + "normalize-package-data": "^4.0.0", "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", "semver": "^7.1.3", @@ -15098,21 +15072,6 @@ "is-core-module": "^2.8.1", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4" - }, - "dependencies": { - "hosted-git-info": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "requires": { - "lru-cache": "^7.5.1" - } - }, - "lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" - } } }, "normalize-path": { @@ -15164,21 +15123,6 @@ "hosted-git-info": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-name": "^3.0.0" - }, - "dependencies": { - "hosted-git-info": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-5.0.0.tgz", - "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", - "requires": { - "lru-cache": "^7.5.1" - } - }, - "lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" - } } }, "npm-packlist": { diff --git a/package.json b/package.json index 29d47da5498b5..4cf42d3e50c23 100644 --- a/package.json +++ b/package.json @@ -74,7 +74,7 @@ "fastest-levenshtein": "^1.0.12", "glob": "^7.2.0", "graceful-fs": "^4.2.9", - "hosted-git-info": "^4.1.0", + "hosted-git-info": "^5.0.0", "ini": "^2.0.0", "init-package-json": "^3.0.1", "is-cidr": "^4.0.2", From df00107f7e169f997280f8f5f8b59fc520fe0762 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 15 Mar 2022 13:27:44 -0700 Subject: [PATCH 11/11] deps: cacache@16.0.1 --- .../git/node_modules/lru-cache/bundle/main.js | 1 - 
.../node_modules/lru-cache/bundle/main.mjs | 1 - .../git/node_modules/lru-cache/index.js | 587 ------------- .../git/node_modules/lru-cache/package.json | 52 -- node_modules/cacache/lib/memoization.js | 10 +- node_modules/cacache/package.json | 8 +- .../node_modules/lru-cache/LICENSE | 15 - .../node_modules/lru-cache/index.js | 615 -------------- node_modules/lru-cache/LICENSE | 2 +- node_modules/lru-cache/index.js | 801 ++++++++++++------ node_modules/lru-cache/package.json | 23 +- .../node_modules/lru-cache/LICENSE | 15 - .../node_modules/lru-cache/index.js | 615 -------------- .../node_modules/lru-cache/package.json | 43 - .../node_modules/lru-cache/LICENSE | 2 +- .../semver/node_modules/lru-cache/index.js | 334 ++++++++ .../node_modules/lru-cache/package.json | 23 +- package-lock.json | 116 +-- package.json | 2 +- 19 files changed, 951 insertions(+), 2314 deletions(-) delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.js delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.mjs delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/index.js delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/package.json delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/LICENSE delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/index.js delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/index.js delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/package.json rename node_modules/{@npmcli/git => semver}/node_modules/lru-cache/LICENSE (92%) create mode 100644 node_modules/semver/node_modules/lru-cache/index.js rename node_modules/{hosted-git-info => semver}/node_modules/lru-cache/package.json (67%) diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.js b/node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.js deleted file mode 100644 index 7eef327e92527..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.js +++ /dev/null @@ -1 +0,0 @@ -(()=>{var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. 
Please use ${e} instead.`,"DeprecationWarning",t,h)},n=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),a=t=>n(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=a(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:g,maxAge:m,stale:S}=t instanceof d?{}:t;if(!n(i))throw new TypeError("max option must be an integer");const x=a(i);if(!x)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||g,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new x(i),this.prev=new x(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!n(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!S,this.updateAgeOnGet=!!o,this.ttlResolution=n(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||m||0,this.ttl){if(!n(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.initializeTTLTracking()}S&&e("stale","allowStale"),m&&e("maxAge","ttl"),g&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=n(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes(){if(this.size)for(let t=this.tail;this.isStale(t)||(yield t),t!==this.head;t=this.prev[t]);}*rindexes(){if(this.size)for(let t=this.head;this.isStale(t)||(yield t),t!==this.tail;t=this.next[t]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const 
s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;if(this.size)for(let i=this.head;;i=this.next[i]){const s=i===this.tail;if(this.isStale(i)&&(this.delete(this.keyList[i]),t=!0),s)break}return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let n=0===this.size?void 0:this.keyMap.get(t);if(void 0===n)n=this.newIndex(),this.keyList[n]=t,this.valList[n]=i,this.keyMap.set(t,n),this.next[this.tail]=n,this.prev[n]=this.tail,this.tail=n,this.size++,this.addItemSize(n,i,t,h,l),o=!1;else{const s=this.valList[n];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(n),this.valList[n]=i,this.addItemSize(n,i,t,h,l)),this.moveToTail(n)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(n,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes())this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of 
this.rindexes())this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={},s=function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10);module.exports=s})(); \ No newline at end of file diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.mjs b/node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.mjs deleted file mode 100644 index 3a4d674c07a41..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/bundle/main.mjs +++ /dev/null @@ -1 +0,0 @@ -var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. Please use ${e} instead.`,"DeprecationWarning",t,h)},n=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),a=t=>n(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=a(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:g,maxAge:m,stale:S}=t instanceof d?{}:t;if(!n(i))throw new TypeError("max option must be an integer");const L=a(i);if(!L)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||g,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new L(i),this.prev=new L(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!n(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!S,this.updateAgeOnGet=!!o,this.ttlResolution=n(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||m||0,this.ttl){if(!n(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.initializeTTLTracking()}S&&e("stale","allowStale"),m&&e("maxAge","ttl"),g&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=n(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes(){if(this.size)for(let t=this.tail;this.isStale(t)||(yield t),t!==this.head;t=this.prev[t]);}*rindexes(){if(this.size)for(let t=this.head;this.isStale(t)||(yield t),t!==this.tail;t=this.next[t]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;if(this.size)for(let i=this.head;;i=this.next[i]){const s=i===this.tail;if(this.isStale(i)&&(this.delete(this.keyList[i]),t=!0),s)break}return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let n=0===this.size?void 0:this.keyMap.get(t);if(void 0===n)n=this.newIndex(),this.keyList[n]=t,this.valList[n]=i,this.keyMap.set(t,n),this.next[this.tail]=n,this.prev[n]=this.tail,this.tail=n,this.size++,this.addItemSize(n,i,t,h,l),o=!1;else{const s=this.valList[n];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(n),this.valList[n]=i,this.addItemSize(n,i,t,h,l)),this.moveToTail(n)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(n,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const 
t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes())this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of this.rindexes())this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={};!function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10); \ No newline at end of file diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/index.js deleted file mode 100644 index e9b2f37013e72..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/index.js +++ /dev/null @@ -1,587 +0,0 @@ -const perf = typeof performance === 'object' && performance && - typeof performance.now === 'function' ? 
performance : Date - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} -const shouldWarn = (code) => !(process.noDeprecation || warned.has(code)) -const warn = (code, what, instead, fn) => { - warned.add(code) - process.emitWarning(`The ${what} is deprecated. Please use ${instead} instead.`, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => !isPosInt(max) ? null -: max <= Math.pow(2, 8) ? Uint8Array -: max <= Math.pow(2, 16) ? Uint16Array -: max <= Math.pow(2, 32) ? Uint32Array -: max <= Number.MAX_SAFE_INTEGER ? ZeroArray -: null - -class ZeroArray extends Array { - constructor (size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor (max) { - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push (n) { - this.heap[this.length++] = n - } - pop () { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor (options = {}) { - const { - max, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize, - sizeCalculation, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { - length, - maxAge, - stale, - } = options instanceof LRUCache ? 
{} : options - - if (!isPosInt(max)) { - throw new TypeError('max option must be an integer') - } - - const UintArray = getUintArray(max) - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize || 0 - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize') - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculating set to non-function') - } - } - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - - if (this.maxSize) { - if (!isPosInt(this.maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified') - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.updateAgeOnGet = !!updateAgeOnGet - this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified') - } - this.initializeTTLTracking() - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - initializeTTLTracking () { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - this.setItemTTL = (index, ttl) => { - this.starts[index] = ttl !== 0 ? perf.now() : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - this.updateItemAge = (index) => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout(() => cachedNow = 0, this.ttlResolution) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - this.isStale = (index) => { - return this.ttls[index] !== 0 && this.starts[index] !== 0 && - ((cachedNow || getNow()) - this.starts[index] > this.ttls[index]) - } - } - updateItemAge (index) {} - setItemTTL (index, ttl) {} - isStale (index) { return false } - - initializeSizeTracking () { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => this.calculatedSize -= this.sizes[index] - this.addItemSize = (index, v, k, size, sizeCalculation) => { - const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0) - this.sizes[index] = isPosInt(s) ? 
s : 0 - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict() - } - this.calculatedSize += this.sizes[index] - } - this.delete = k => { - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - this.calculatedSize -= this.sizes[index] - } - } - return LRUCache.prototype.delete.call(this, k) - } - } - removeItemSize (index) {} - addItemSize (index, v, k, size, sizeCalculation) {} - - *indexes () { - if (this.size) { - for (let i = this.tail; true; i = this.prev[i]) { - if (!this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } - } - } - } - *rindexes () { - if (this.size) { - for (let i = this.head; true; i = this.next[i]) { - if (!this.isStale(i)) { - yield i - } - if (i === this.tail) { - break - } - } - } - } - - *entries () { - for (const i of this.indexes()) { - yield [this.keyList[i], this.valList[i]] - } - } - - *keys () { - for (const i of this.indexes()) { - yield this.keyList[i] - } - } - - *values () { - for (const i of this.indexes()) { - yield this.valList[i] - } - } - - [Symbol.iterator] () { - return this.entries() - } - - find (fn, getOptions = {}) { - for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach (fn, thisp = this) { - for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - rforEach (fn, thisp = this) { - for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - get prune () { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale () { - let deleted = false - if (this.size) { - for (let i = this.head; true; i = this.next[i]) { - const b = i === this.tail - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - if (b) { - break - } - } - } - return deleted - } - - dump () { - const arr = [] - for (const i of this.indexes()) { - const key = this.keyList[i] - const value = this.valList[i] - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load (arr) { - this.clear() - for (const [key, entry] of arr) { - this.set(key, entry.value, entry) - } - } - - dispose (v, k, reason) {} - - set (k, v, { - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - } = {}) { - let index = this.size === 0 ? 
undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size ++ - this.addItemSize(index, v, k, size, sizeCalculation) - noUpdateTTL = false - } else { - // update - const oldVal = this.valList[index] - if (v !== oldVal) { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, v, k, size, sizeCalculation) - } - this.moveToTail(index) - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl) - } - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex () { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max) { - return this.evict() - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop () { - if (this.size) { - const val = this.valList[this.head] - this.evict() - return val - } - } - - evict () { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - this.removeItemSize(head) - this.head = this.next[head] - this.keyMap.delete(k) - this.size -- - return head - } - - has (k) { - return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) - } - - // like get(), but without any LRU updating or TTL expiration - peek (k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - return this.valList[index] - } - } - - get (k, { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (this.isStale(index)) { - const value = allowStale ? 
this.valList[index] : undefined - this.delete(k) - return value - } else { - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return this.valList[index] - } - } - } - - connect (p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail (index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del () { - deprecatedMethod('del', 'delete') - return this.delete - } - delete (k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - this.dispose(this.valList[index], k, 'delete') - if (this.disposeAfter) { - this.disposed.push([this.valList[index], k, 'delete']) - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size -- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear () { - if (this.dispose !== LRUCache.prototype.dispose) { - for (const index of this.rindexes()) { - this.dispose(this.valList[index], this.keyList[index], 'delete') - } - } - if (this.disposeAfter) { - for (const index of this.rindexes()) { - this.disposed.push([this.valList[index], this.keyList[index], 'delete']) - } - } - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - get reset () { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length () { - deprecatedProperty('length', 'size') - return this.size - } -} - -module.exports = LRUCache diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/package.json deleted file mode 100644 index ae92116975dc9..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "7.4.0", - "author": "Isaac Z. 
Schlueter ", - "keywords": [ - "mru", - "lru", - "cache" - ], - "scripts": { - "prepare": "webpack-cli -o bundle ./index.js --node-env production", - "build": "npm run prepare", - "presize": "npm run prepare", - "test": "tap", - "snap": "tap", - "size": "size-limit", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "main": "index.js", - "browser": "./bundle/main.js", - "exports": { - ".": "./index.js", - "./browser": "./bundle/main.js" - }, - "repository": "git://github.com/isaacs/node-lru-cache.git", - "devDependencies": { - "@size-limit/preset-small-lib": "^7.0.8", - "benchmark": "^2.1.4", - "size-limit": "^7.0.8", - "tap": "^15.1.6", - "webpack-cli": "^4.9.2" - }, - "license": "ISC", - "files": [ - "index.js", - "bundle" - ], - "engines": { - "node": ">=12" - }, - "tap": { - "coverage-map": "map.js" - }, - "size-limit": [ - { - "path": "./bundle/main.js" - } - ] -} diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js index e1b13dd5fd528..cd25f2013b4af 100644 --- a/node_modules/cacache/lib/memoization.js +++ b/node_modules/cacache/lib/memoization.js @@ -2,13 +2,11 @@ const LRU = require('lru-cache') -const MAX_SIZE = 50 * 1024 * 1024 // 50MB -const MAX_AGE = 3 * 60 * 1000 - const MEMOIZED = new LRU({ - max: MAX_SIZE, - maxAge: MAX_AGE, - length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, }) module.exports.clearMemoized = clearMemoized diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index b9efa92d9f3e0..1003230cd6363 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "16.0.0", + "version": "16.0.1", "cache-version": { "content": "2", "index": "5" @@ -50,10 +50,10 @@ "@npmcli/move-file": "^1.1.2", "chownr": "^2.0.0", "fs-minipass": "^2.1.0", - "glob": "^7.1.4", + "glob": "^7.2.0", "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", + "lru-cache": "^7.5.1", + "minipass": "^3.1.6", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE b/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE deleted file mode 100644 index 9b58a3e03d1df..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
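
The memoization.js hunk above captures the lru-cache v6 -> v7 option migration that drives most of this patch: v6 treated max as a total calculated length (here, bytes), with maxAge for expiry and length as the per-entry weight function; v7 makes max a hard entry count and moves the byte budget to maxSize plus sizeCalculation, with ttl replacing maxAge. Below is a minimal sketch of the v7 usage, mirroring the values from that hunk; the set/get calls at the end are illustrative only and not part of the patch.

const LRU = require('lru-cache') // v7.x

const memoized = new LRU({
  max: 500,                  // v7: hard cap on entry *count* (required; v7 throws without it)
  maxSize: 50 * 1024 * 1024, // 50MB byte budget (v6 put this in max)
  ttl: 3 * 60 * 1000,        // 3 minutes (formerly maxAge)
  // per-entry weight, formerly the length option:
  sizeCalculation: (entry, key) =>
    key.startsWith('key:') ? entry.data.length : entry.length,
})

memoized.set('key:foo', { data: Buffer.from('bar') })
memoized.get('key:foo') // -> { data: <Buffer 62 61 72> }

Note that v7 keeps the old names alive behind deprecation shims (stale -> allowStale, maxAge -> ttl, length -> sizeCalculation), which is why the rewritten index.js further down emits DeprecationWarnings via deprecatedOption()/deprecatedMethod() instead of failing outright.
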
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/index.js deleted file mode 100644 index e37f51616452e..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/index.js +++ /dev/null @@ -1,615 +0,0 @@ -const perf = typeof performance === 'object' && performance && - typeof performance.now === 'function' ? performance : Date - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} -const shouldWarn = (code) => typeof process === 'object' && - process && - !(process.noDeprecation || warned.has(code)) -const warn = (code, what, instead, fn) => { - warned.add(code) - process.emitWarning(`The ${what} is deprecated. Please use ${instead} instead.`, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => !isPosInt(max) ? null -: max <= Math.pow(2, 8) ? Uint8Array -: max <= Math.pow(2, 16) ? Uint16Array -: max <= Math.pow(2, 32) ? Uint32Array -: max <= Number.MAX_SAFE_INTEGER ? ZeroArray -: null - -class ZeroArray extends Array { - constructor (size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor (max) { - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push (n) { - this.heap[this.length++] = n - } - pop () { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor (options = {}) { - const { - max, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize, - sizeCalculation, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { - length, - maxAge, - stale, - } = options instanceof LRUCache ? 
{} : options - - if (!isPosInt(max)) { - throw new TypeError('max option must be an integer') - } - - const UintArray = getUintArray(max) - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize || 0 - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize') - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculating set to non-function') - } - } - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - - if (this.maxSize) { - if (!isPosInt(this.maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified') - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.updateAgeOnGet = !!updateAgeOnGet - this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified') - } - this.initializeTTLTracking() - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - initializeTTLTracking () { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - this.setItemTTL = (index, ttl) => { - this.starts[index] = ttl !== 0 ? perf.now() : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - this.updateItemAge = (index) => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout(() => cachedNow = 0, this.ttlResolution) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - this.isStale = (index) => { - return this.ttls[index] !== 0 && this.starts[index] !== 0 && - ((cachedNow || getNow()) - this.starts[index] > this.ttls[index]) - } - } - updateItemAge (index) {} - setItemTTL (index, ttl) {} - isStale (index) { return false } - - initializeSizeTracking () { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => this.calculatedSize -= this.sizes[index] - this.addItemSize = (index, v, k, size, sizeCalculation) => { - const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0) - this.sizes[index] = isPosInt(s) ? 
s : 0 - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict() - } - this.calculatedSize += this.sizes[index] - } - this.delete = k => { - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - this.calculatedSize -= this.sizes[index] - } - } - return LRUCache.prototype.delete.call(this, k) - } - } - removeItemSize (index) {} - addItemSize (index, v, k, size, sizeCalculation) {} - - *indexes ({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail, j; true; ) { - if (!this.isValidIndex(i)) { - break - } - j = i === this.head - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes ({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head, j; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - // either the tail now, or WAS the tail, and deleted - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex (index) { - return this.keyMap.get(this.keyList[index]) === index - } - - *entries () { - for (const i of this.indexes()) { - yield [this.keyList[i], this.valList[i]] - } - } - *rentries () { - for (const i of this.rindexes()) { - yield [this.keyList[i], this.valList[i]] - } - } - - *keys () { - for (const i of this.indexes()) { - yield this.keyList[i] - } - } - *rkeys () { - for (const i of this.rindexes()) { - yield this.keyList[i] - } - } - - *values () { - for (const i of this.indexes()) { - yield this.valList[i] - } - } - *rvalues () { - for (const i of this.rindexes()) { - yield this.valList[i] - } - } - - [Symbol.iterator] () { - return this.entries() - } - - find (fn, getOptions = {}) { - for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach (fn, thisp = this) { - for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - rforEach (fn, thisp = this) { - for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - get prune () { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale () { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump () { - const arr = [] - for (const i of this.indexes()) { - const key = this.keyList[i] - const value = this.valList[i] - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load (arr) { - this.clear() - for (const [key, entry] of arr) { - this.set(key, entry.value, entry) - } - } - - dispose (v, k, reason) {} - - set (k, v, { - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - } = {}) { - let index = this.size === 0 ? 
undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size ++ - this.addItemSize(index, v, k, size, sizeCalculation) - noUpdateTTL = false - } else { - // update - const oldVal = this.valList[index] - if (v !== oldVal) { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, v, k, size, sizeCalculation) - } - this.moveToTail(index) - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl) - } - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex () { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max) { - return this.evict() - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop () { - if (this.size) { - const val = this.valList[this.head] - this.evict() - return val - } - } - - evict () { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - this.removeItemSize(head) - this.head = this.next[head] - this.keyMap.delete(k) - this.size -- - return head - } - - has (k) { - return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) - } - - // like get(), but without any LRU updating or TTL expiration - peek (k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - return this.valList[index] - } - } - - get (k, { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (this.isStale(index)) { - const value = allowStale ? 
this.valList[index] : undefined - this.delete(k) - return value - } else { - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return this.valList[index] - } - } - } - - connect (p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail (index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del () { - deprecatedMethod('del', 'delete') - return this.delete - } - delete (k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - this.dispose(this.valList[index], k, 'delete') - if (this.disposeAfter) { - this.disposed.push([this.valList[index], k, 'delete']) - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size -- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear () { - if (this.dispose !== LRUCache.prototype.dispose) { - for (const index of this.rindexes({ allowStale: true })) { - this.dispose(this.valList[index], this.keyList[index], 'delete') - } - } - if (this.disposeAfter) { - for (const index of this.rindexes({ allowStale: true })) { - this.disposed.push([this.valList[index], this.keyList[index], 'delete']) - } - } - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - get reset () { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length () { - deprecatedProperty('length', 'size') - return this.size - } -} - -module.exports = LRUCache diff --git a/node_modules/lru-cache/LICENSE b/node_modules/lru-cache/LICENSE index 19129e315fe59..9b58a3e03d1df 100644 --- a/node_modules/lru-cache/LICENSE +++ b/node_modules/lru-cache/LICENSE @@ -1,6 +1,6 @@ The ISC License -Copyright (c) Isaac Z. Schlueter and Contributors +Copyright (c) 2010-2022 Isaac Z. 
Schlueter and Contributors Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js index 573b6b85b9779..e37f51616452e 100644 --- a/node_modules/lru-cache/index.js +++ b/node_modules/lru-cache/index.js @@ -1,334 +1,615 @@ -'use strict' - -// A linked list to keep track of recently-used-ness -const Yallist = require('yallist') - -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') - -const naiveLength = () => 1 - -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. -// -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. -class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } - - if (!options) - options = {} - - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() +const perf = typeof performance === 'object' && performance && + typeof performance.now === 'function' ? performance : Date + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) } - - // resize the cache when the max changes. - set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') - - this[MAX] = mL || Infinity - trim(this) +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) } - get max () { - return this[MAX] +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) } +} +const shouldWarn = (code) => typeof process === 'object' && + process && + !(process.noDeprecation || warned.has(code)) +const warn = (code, what, instead, fn) => { + warned.add(code) + process.emitWarning(`The ${what} is deprecated. 
Please use ${instead} instead.`, 'DeprecationWarning', code, fn) +} - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale - } - get allowStale () { - return this[ALLOW_STALE] +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => !isPosInt(max) ? null +: max <= Math.pow(2, 8) ? Uint8Array +: max <= Math.pow(2, 16) ? Uint16Array +: max <= Math.pow(2, 32) ? Uint32Array +: max <= Number.MAX_SAFE_INTEGER ? ZeroArray +: null + +class ZeroArray extends Array { + constructor (size) { + super(size) + this.fill(0) } +} - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') - - this[MAX_AGE] = mA - trim(this) +class Stack { + constructor (max) { + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 } - get maxAge () { - return this[MAX_AGE] + push (n) { + this.heap[this.length++] = n } + pop () { + return this.heap[--this.length] + } +} - // resize the cache when the lengthCalculator changes. - set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength +class LRUCache { + constructor (options = {}) { + const { + max, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize, + sizeCalculation, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { + length, + maxAge, + stale, + } = options instanceof LRUCache ? 
{} : options + + if (!isPosInt(max)) { + throw new TypeError('max option must be an integer') + } - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) + const UintArray = getUintArray(max) + if (!UintArray) { + throw new Error('invalid max value: ' + max) } - trim(this) - } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } + this.max = max + this.maxSize = maxSize || 0 + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize') + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculating set to non-function') + } + } + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev + if (this.maxSize) { + if (!isPosInt(this.maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified') + } + this.initializeSizeTracking() } - } - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next + this.allowStale = !!allowStale || !!stale + this.updateAgeOnGet = !!updateAgeOnGet + this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified') + } + this.initializeTTLTracking() } - } - keys () { - return this[LRU_LIST].toArray().map(k => k.key) + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } } - values () { - return this[LRU_LIST].toArray().map(k => k.value) + initializeTTLTracking () { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + this.setItemTTL = (index, ttl) => { + this.starts[index] = ttl !== 0 ? perf.now() : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + this.updateItemAge = (index) => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. 
+ let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout(() => cachedNow = 0, this.ttlResolution) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + this.isStale = (index) => { + return this.ttls[index] !== 0 && this.starts[index] !== 0 && + ((cachedNow || getNow()) - this.starts[index] > this.ttls[index]) + } } - - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + updateItemAge (index) {} + setItemTTL (index, ttl) {} + isStale (index) { return false } + + initializeSizeTracking () { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => this.calculatedSize -= this.sizes[index] + this.addItemSize = (index, v, k, size, sizeCalculation) => { + const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0) + this.sizes[index] = isPosInt(s) ? s : 0 + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict() + } + this.calculatedSize += this.sizes[index] + } + this.delete = k => { + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + this.calculatedSize -= this.sizes[index] + } + } + return LRUCache.prototype.delete.call(this, k) } - - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list } - - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) + removeItemSize (index) {} + addItemSize (index, v, k, size, sizeCalculation) {} + + *indexes ({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail, j; true; ) { + if (!this.isValidIndex(i)) { + break + } + j = i === this.head + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } } - dumpLru () { - return this[LRU_LIST] + *rindexes ({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head, j; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + // either the tail now, or WAS the tail, and deleted + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } } - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] + isValidIndex (index) { + return this.keyMap.get(this.keyList[index]) === index + } - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') + *entries () { + for (const i of this.indexes()) { + yield [this.keyList[i], this.valList[i]] + } + } + *rentries () { + for (const i of this.rindexes()) { + yield [this.keyList[i], this.valList[i]] + } + } - const now = maxAge ? 
Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) + *keys () { + for (const i of this.indexes()) { + yield this.keyList[i] + } + } + *rkeys () { + for (const i of this.rindexes()) { + yield this.keyList[i] + } + } - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } + *values () { + for (const i of this.indexes()) { + yield this.valList[i] + } + } + *rvalues () { + for (const i of this.rindexes()) { + yield this.valList[i] + } + } - const node = this[CACHE].get(key) - const item = node.value + [Symbol.iterator] () { + return this.entries() + } - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) + find (fn, getOptions = {}) { + for (const i of this.indexes()) { + if (fn(this.valList[i], this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) } + } + } - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true + forEach (fn, thisp = this) { + for (const i of this.indexes()) { + fn.call(thisp, this.valList[i], this.keyList[i], this) } + } - const hit = new Entry(key, value, len, now, maxAge) + rforEach (fn, thisp = this) { + for (const i of this.rindexes()) { + fn.call(thisp, this.valList[i], this.keyList[i], this) + } + } - // oversized objects fall out of cache automatically. - if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) + get prune () { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } - return false + purgeStale () { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } } + return deleted + } - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true + dump () { + const arr = [] + for (const i of this.indexes()) { + const key = this.keyList[i] + const value = this.valList[i] + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr } - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) + load (arr) { + this.clear() + for (const [key, entry] of arr) { + this.set(key, entry.value, entry) + } } - get (key) { - return get(this, key, true) + dispose (v, k, reason) {} + + set (k, v, { + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + } = {}) { + let index = this.size === 0 ? 
undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size ++ + this.addItemSize(index, v, k, size, sizeCalculation) + noUpdateTTL = false + } else { + // update + const oldVal = this.valList[index] + if (v !== oldVal) { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, v, k, size, sizeCalculation) + } + this.moveToTail(index) + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl) + } + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this } - peek (key) { - return get(this, key, false) + newIndex () { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max) { + return this.evict() + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ } pop () { - const node = this[LRU_LIST].tail - if (!node) - return null + if (this.size) { + const val = this.valList[this.head] + this.evict() + return val + } + } + + evict () { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + this.removeItemSize(head) + this.head = this.next[head] + this.keyMap.delete(k) + this.size -- + return head + } - del(this, node) - return node.value + has (k) { + return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) } - del (key) { - del(this, this[CACHE].get(key)) + // like get(), but without any LRU updating or TTL expiration + peek (k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + return this.valList[index] + } } - load (arr) { - // reset the cache - this.reset() - - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) + get (k, { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (this.isStale(index)) { + const value = allowStale ? 
this.valList[index] : undefined + this.delete(k) + return value + } else { + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) } + return this.valList[index] } } } - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) + connect (p, n) { + this.prev[n] = p + this.next[p] = n } -} -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) + moveToTail (index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) } + this.connect(this.tail, index) + this.tail = index } - return hit.value } -} - -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} - -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. - const prev = walker.prev - del(self, walker) - walker = prev + get del () { + deprecatedMethod('del', 'delete') + return this.delete + } + delete (k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + this.dispose(this.valList[index], k, 'delete') + if (this.disposeAfter) { + this.disposed.push([this.valList[index], k, 'delete']) + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size -- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } } + return deleted } -} -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) - - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) + clear () { + if (this.dispose !== LRUCache.prototype.dispose) { + for (const index of this.rindexes({ allowStale: true })) { + this.dispose(this.valList[index], this.keyList[index], 'delete') + } + } + if (this.disposeAfter) { + for (const index of this.rindexes({ allowStale: true })) { + this.disposed.push([this.valList[index], this.keyList[index], 'delete']) + } + } + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + 
this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } } -} - -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 + get reset () { + deprecatedMethod('reset', 'clear') + return this.clear } -} -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined + get length () { + deprecatedProperty('length', 'size') + return this.size } - if (hit) - fn.call(thisp, hit.value, hit.key, self) } module.exports = LRUCache diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json index 43b7502c3e7c7..a62f74c2b648a 100644 --- a/node_modules/lru-cache/package.json +++ b/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "6.0.0", + "version": "7.5.1", "author": "Isaac Z. Schlueter ", "keywords": [ "mru", @@ -9,8 +9,10 @@ "cache" ], "scripts": { + "build": "", "test": "tap", "snap": "tap", + "size": "size-limit", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags" @@ -18,17 +20,24 @@ "main": "index.js", "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", "benchmark": "^2.1.4", - "tap": "^14.10.7" + "size-limit": "^7.0.8", + "tap": "^15.1.6" }, "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, "files": [ "index.js" ], "engines": { - "node": ">=10" - } + "node": ">=12" + }, + "tap": { + "coverage-map": "map.js" + }, + "size-limit": [ + { + "path": "./index.js" + } + ] } diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE b/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE deleted file mode 100644 index 9b58a3e03d1df..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/index.js deleted file mode 100644 index e37f51616452e..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js +++ /dev/null @@ -1,615 +0,0 @@ -const perf = typeof performance === 'object' && performance && - typeof performance.now === 'function' ? 
performance : Date - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} -const shouldWarn = (code) => typeof process === 'object' && - process && - !(process.noDeprecation || warned.has(code)) -const warn = (code, what, instead, fn) => { - warned.add(code) - process.emitWarning(`The ${what} is deprecated. Please use ${instead} instead.`, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => !isPosInt(max) ? null -: max <= Math.pow(2, 8) ? Uint8Array -: max <= Math.pow(2, 16) ? Uint16Array -: max <= Math.pow(2, 32) ? Uint32Array -: max <= Number.MAX_SAFE_INTEGER ? ZeroArray -: null - -class ZeroArray extends Array { - constructor (size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor (max) { - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push (n) { - this.heap[this.length++] = n - } - pop () { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor (options = {}) { - const { - max, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize, - sizeCalculation, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { - length, - maxAge, - stale, - } = options instanceof LRUCache ? 
{} : options - - if (!isPosInt(max)) { - throw new TypeError('max option must be an integer') - } - - const UintArray = getUintArray(max) - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize || 0 - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize') - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculating set to non-function') - } - } - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - - if (this.maxSize) { - if (!isPosInt(this.maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified') - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.updateAgeOnGet = !!updateAgeOnGet - this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified') - } - this.initializeTTLTracking() - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - initializeTTLTracking () { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - this.setItemTTL = (index, ttl) => { - this.starts[index] = ttl !== 0 ? perf.now() : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - this.updateItemAge = (index) => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout(() => cachedNow = 0, this.ttlResolution) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - this.isStale = (index) => { - return this.ttls[index] !== 0 && this.starts[index] !== 0 && - ((cachedNow || getNow()) - this.starts[index] > this.ttls[index]) - } - } - updateItemAge (index) {} - setItemTTL (index, ttl) {} - isStale (index) { return false } - - initializeSizeTracking () { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => this.calculatedSize -= this.sizes[index] - this.addItemSize = (index, v, k, size, sizeCalculation) => { - const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0) - this.sizes[index] = isPosInt(s) ? 
s : 0 - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict() - } - this.calculatedSize += this.sizes[index] - } - this.delete = k => { - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - this.calculatedSize -= this.sizes[index] - } - } - return LRUCache.prototype.delete.call(this, k) - } - } - removeItemSize (index) {} - addItemSize (index, v, k, size, sizeCalculation) {} - - *indexes ({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail, j; true; ) { - if (!this.isValidIndex(i)) { - break - } - j = i === this.head - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes ({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head, j; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - // either the tail now, or WAS the tail, and deleted - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex (index) { - return this.keyMap.get(this.keyList[index]) === index - } - - *entries () { - for (const i of this.indexes()) { - yield [this.keyList[i], this.valList[i]] - } - } - *rentries () { - for (const i of this.rindexes()) { - yield [this.keyList[i], this.valList[i]] - } - } - - *keys () { - for (const i of this.indexes()) { - yield this.keyList[i] - } - } - *rkeys () { - for (const i of this.rindexes()) { - yield this.keyList[i] - } - } - - *values () { - for (const i of this.indexes()) { - yield this.valList[i] - } - } - *rvalues () { - for (const i of this.rindexes()) { - yield this.valList[i] - } - } - - [Symbol.iterator] () { - return this.entries() - } - - find (fn, getOptions = {}) { - for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach (fn, thisp = this) { - for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - rforEach (fn, thisp = this) { - for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) - } - } - - get prune () { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale () { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump () { - const arr = [] - for (const i of this.indexes()) { - const key = this.keyList[i] - const value = this.valList[i] - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load (arr) { - this.clear() - for (const [key, entry] of arr) { - this.set(key, entry.value, entry) - } - } - - dispose (v, k, reason) {} - - set (k, v, { - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - } = {}) { - let index = this.size === 0 ? 
undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size ++ - this.addItemSize(index, v, k, size, sizeCalculation) - noUpdateTTL = false - } else { - // update - const oldVal = this.valList[index] - if (v !== oldVal) { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, v, k, size, sizeCalculation) - } - this.moveToTail(index) - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl) - } - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex () { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max) { - return this.evict() - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop () { - if (this.size) { - const val = this.valList[this.head] - this.evict() - return val - } - } - - evict () { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - this.removeItemSize(head) - this.head = this.next[head] - this.keyMap.delete(k) - this.size -- - return head - } - - has (k) { - return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) - } - - // like get(), but without any LRU updating or TTL expiration - peek (k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - return this.valList[index] - } - } - - get (k, { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (this.isStale(index)) { - const value = allowStale ? 
this.valList[index] : undefined - this.delete(k) - return value - } else { - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return this.valList[index] - } - } - } - - connect (p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail (index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del () { - deprecatedMethod('del', 'delete') - return this.delete - } - delete (k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - this.dispose(this.valList[index], k, 'delete') - if (this.disposeAfter) { - this.disposed.push([this.valList[index], k, 'delete']) - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size -- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear () { - if (this.dispose !== LRUCache.prototype.dispose) { - for (const index of this.rindexes({ allowStale: true })) { - this.dispose(this.valList[index], this.keyList[index], 'delete') - } - } - if (this.disposeAfter) { - for (const index of this.rindexes({ allowStale: true })) { - this.disposed.push([this.valList[index], this.keyList[index], 'delete']) - } - } - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - get reset () { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length () { - deprecatedProperty('length', 'size') - return this.size - } -} - -module.exports = LRUCache diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json deleted file mode 100644 index a62f74c2b648a..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "7.5.1", - "author": "Isaac Z. 
Schlueter ", - "keywords": [ - "mru", - "lru", - "cache" - ], - "scripts": { - "build": "", - "test": "tap", - "snap": "tap", - "size": "size-limit", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "main": "index.js", - "repository": "git://github.com/isaacs/node-lru-cache.git", - "devDependencies": { - "@size-limit/preset-small-lib": "^7.0.8", - "benchmark": "^2.1.4", - "size-limit": "^7.0.8", - "tap": "^15.1.6" - }, - "license": "ISC", - "files": [ - "index.js" - ], - "engines": { - "node": ">=12" - }, - "tap": { - "coverage-map": "map.js" - }, - "size-limit": [ - { - "path": "./index.js" - } - ] -} diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE b/node_modules/semver/node_modules/lru-cache/LICENSE similarity index 92% rename from node_modules/@npmcli/git/node_modules/lru-cache/LICENSE rename to node_modules/semver/node_modules/lru-cache/LICENSE index 9b58a3e03d1df..19129e315fe59 100644 --- a/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE +++ b/node_modules/semver/node_modules/lru-cache/LICENSE @@ -1,6 +1,6 @@ The ISC License -Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors +Copyright (c) Isaac Z. Schlueter and Contributors Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/node_modules/semver/node_modules/lru-cache/index.js b/node_modules/semver/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..573b6b85b9779 --- /dev/null +++ b/node_modules/semver/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. 
+ set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. + set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. 
+ if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/package.json b/node_modules/semver/node_modules/lru-cache/package.json similarity index 67% rename from node_modules/hosted-git-info/node_modules/lru-cache/package.json rename to node_modules/semver/node_modules/lru-cache/package.json index a62f74c2b648a..43b7502c3e7c7 100644 --- a/node_modules/hosted-git-info/node_modules/lru-cache/package.json +++ b/node_modules/semver/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.5.1", + "version": "6.0.0", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -9,10 +9,8 @@ "cache" ], "scripts": { - "build": "", "test": "tap", "snap": "tap", - "size": "size-limit", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags" @@ -20,24 +18,17 @@ "main": "index.js", "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { - "@size-limit/preset-small-lib": "^7.0.8", "benchmark": "^2.1.4", - "size-limit": "^7.0.8", - "tap": "^15.1.6" + "tap": "^14.10.7" }, "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, "files": [ "index.js" ], "engines": { - "node": ">=12" - }, - "tap": { - "coverage-map": "map.js" - }, - "size-limit": [ - { - "path": "./index.js" - } - ] + "node": ">=10" + } } diff --git a/package-lock.json b/package-lock.json index 8fc2c375b0342..2b9388cad4303 100644 --- a/package-lock.json +++ b/package-lock.json @@ -97,7 +97,7 @@ "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "archy": "~1.0.0", - "cacache": "^16.0.0", + "cacache": "^16.0.1", "chalk": "^4.1.2", "chownr": "^2.0.0", "cli-columns": "^4.0.0", @@ -897,15 +897,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/@npmcli/git/node_modules/lru-cache": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.4.0.tgz", - "integrity": "sha512-YOfuyWa/Ee+PXbDm40j9WXyJrzQUynVbgn4Km643UYcWNcrSfRkKL0WaiUcxcIbkXcVTgNpDqSnPXntWXT75cw==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, "node_modules/@npmcli/installed-package-contents": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-1.0.7.tgz", @@ -1613,19 +1604,19 @@ "inBundle": true }, "node_modules/cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.1.tgz", + "integrity": "sha512-tHPtfdZDqQpZ15eaEZeLspIqS5mK5fOBDZi6AjuqaIi53QNVXH3dQv6uKT3YuUu6uxV/8pjU9in0CoJ8fgaHqw==", "inBundle": true, "dependencies": { "@npmcli/fs": "^1.0.0", "@npmcli/move-file": "^1.1.2", "chownr": "^2.0.0", "fs-minipass": "^2.1.0", - "glob": "^7.1.4", + "glob": "^7.2.0", "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", + "lru-cache": "^7.5.1", + "minipass": "^3.1.6", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", @@ -3794,15 +3785,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, "node_modules/html-encoding-sniffer": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", @@ -4951,15 +4933,12 @@ } }, "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": 
"sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", "inBundle": true, - "dependencies": { - "yallist": "^4.0.0" - }, "engines": { - "node": ">=10" + "node": ">=12" } }, "node_modules/make-dir": { @@ -5021,15 +5000,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/make-fetch-happen/node_modules/lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, "node_modules/markdown-escapes": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", @@ -6890,6 +6860,18 @@ "node": ">=10" } }, + "node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "inBundle": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", @@ -11441,13 +11423,6 @@ "promise-retry": "^2.0.1", "semver": "^7.3.5", "which": "^2.0.2" - }, - "dependencies": { - "lru-cache": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.4.0.tgz", - "integrity": "sha512-YOfuyWa/Ee+PXbDm40j9WXyJrzQUynVbgn4Km643UYcWNcrSfRkKL0WaiUcxcIbkXcVTgNpDqSnPXntWXT75cw==" - } } }, "@npmcli/installed-package-contents": { @@ -11986,18 +11961,18 @@ "integrity": "sha1-y5T662HIaWRR2zZTThQi+U8K7og=" }, "cacache": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.0.tgz", - "integrity": "sha512-pMX6sqJSlGpxCM257by5syifGb7zH6C30CaJXeGXqmKNrHKqvMmwM8KgKmsZcUAsnNQkt7WvENH2Kl53RpFQuA==", + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.1.tgz", + "integrity": "sha512-tHPtfdZDqQpZ15eaEZeLspIqS5mK5fOBDZi6AjuqaIi53QNVXH3dQv6uKT3YuUu6uxV/8pjU9in0CoJ8fgaHqw==", "requires": { "@npmcli/fs": "^1.0.0", "@npmcli/move-file": "^1.1.2", "chownr": "^2.0.0", "fs-minipass": "^2.1.0", - "glob": "^7.1.4", + "glob": "^7.2.0", "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", + "lru-cache": "^7.5.1", + "minipass": "^3.1.6", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", @@ -13605,13 +13580,6 @@ "integrity": "sha512-rRnjWu0Bxj+nIfUOkz0695C0H6tRrN5iYIzYejb0tDEefe2AekHu/U5Kn9pEie5vsJqpNQU02az7TGSH3qpz4Q==", "requires": { "lru-cache": "^7.5.1" - }, - "dependencies": { - "lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" - } } }, "html-encoding-sniffer": { @@ -14700,12 +14668,9 @@ "dev": true }, "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", + "integrity": 
"sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" }, "make-dir": { "version": "3.1.0", @@ -14753,13 +14718,6 @@ "promise-retry": "^2.0.1", "socks-proxy-agent": "^6.1.1", "ssri": "^8.0.1" - }, - "dependencies": { - "lru-cache": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.5.1.tgz", - "integrity": "sha512-q1TS8IqKvcg3aScamKCHpepSrHF537Ww7nHahBOxhDu9D2YoBXAsj/7uFdZFj1xJr9LmyeJ62AdyofCHafUbIA==" - } } }, "markdown-escapes": { @@ -16147,6 +16105,16 @@ "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", "requires": { "lru-cache": "^6.0.0" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + } } }, "set-blocking": { diff --git a/package.json b/package.json index 4cf42d3e50c23..faf59dc5d34d4 100644 --- a/package.json +++ b/package.json @@ -65,7 +65,7 @@ "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "archy": "~1.0.0", - "cacache": "^16.0.0", + "cacache": "^16.0.1", "chalk": "^4.1.2", "chownr": "^2.0.0", "cli-columns": "^4.0.0",