diff --git a/package-lock.json b/package-lock.json index a0b7f40f..0a177909 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10367,6 +10367,16 @@ "integrity": "sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA==", "dev": true }, + "node_modules/@types/promise-retry": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/@types/promise-retry/-/promise-retry-1.1.6.tgz", + "integrity": "sha512-EC1+OMXV0PZb0pf+cmyxc43MEP2CDumZe4AfuxWboxxEixztIebknpJPZAX5XlodGF1OY+C1E/RAeNGzxf+bJA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/retry": "*" + } + }, "node_modules/@types/prop-types": { "version": "15.7.11", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.11.tgz", @@ -10426,6 +10436,13 @@ "@types/node": "*" } }, + "node_modules/@types/retry": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.5.tgz", + "integrity": "sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/s3rver": { "version": "3.7.0", "resolved": "https://registry.npmjs.org/@types/s3rver/-/s3rver-3.7.0.tgz", @@ -10447,6 +10464,13 @@ "integrity": "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==", "license": "MIT" }, + "node_modules/@types/signal-exit": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/signal-exit/-/signal-exit-3.0.4.tgz", + "integrity": "sha512-e7EUPfU9afHyWc5CXtlqbvVHEshrb05uPlDCenWIbMgtWoFrTuTDVYNLKk6o4X2/4oHTfNqrJX/vaJ3uBhtXTg==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/sinon": { "version": "9.0.11", "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-9.0.11.tgz", @@ -25379,6 +25403,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "license": "MIT", "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" @@ -30686,6 +30711,8 @@ "decompress": "^4.2.1", "mongodb-download-url": "^1.6.3", "node-fetch": "^2.7.0", + "promise-retry": "^2.0.1", + "signal-exit": "^4.1.0", "tar": "^6.1.15" }, "devDependencies": { @@ -30697,6 +30724,8 @@ "@types/decompress": "^4.2.4", "@types/mocha": "^9.1.1", "@types/node": "^22.15.30", + "@types/promise-retry": "^1.1.6", + "@types/signal-exit": "^3.0.4", "@types/tar": "^6.1.5", "depcheck": "^1.4.7", "eslint": "^7.25.0", @@ -30735,6 +30764,18 @@ } } }, + "packages/mongodb-downloader/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "packages/mongodb-downloader/node_modules/tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", @@ -38900,6 +38941,8 @@ "@types/decompress": "^4.2.4", "@types/mocha": "^9.1.1", "@types/node": "^22.15.30", + "@types/promise-retry": "^1.1.6", + "@types/signal-exit": "^3.0.4", "@types/tar": "^6.1.5", "debug": "^4.4.0", "decompress": "^4.2.1", @@ -38911,6 +38954,8 @@ "node-fetch": "^2.7.0", "nyc": "^15.1.0", "prettier": "^3.5.3", + "promise-retry": "^2.0.1", + "signal-exit": "^4.1.0", "tar": 
"^6.1.15", "typescript": "^5.0.4" }, @@ -38928,6 +38973,11 @@ "whatwg-url": "^5.0.0" } }, + "signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" + }, "tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", @@ -41699,6 +41749,15 @@ "integrity": "sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA==", "dev": true }, + "@types/promise-retry": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/@types/promise-retry/-/promise-retry-1.1.6.tgz", + "integrity": "sha512-EC1+OMXV0PZb0pf+cmyxc43MEP2CDumZe4AfuxWboxxEixztIebknpJPZAX5XlodGF1OY+C1E/RAeNGzxf+bJA==", + "dev": true, + "requires": { + "@types/retry": "*" + } + }, "@types/prop-types": { "version": "15.7.11", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.11.tgz", @@ -41757,6 +41816,12 @@ "@types/node": "*" } }, + "@types/retry": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.5.tgz", + "integrity": "sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==", + "dev": true + }, "@types/s3rver": { "version": "3.7.0", "resolved": "https://registry.npmjs.org/@types/s3rver/-/s3rver-3.7.0.tgz", @@ -41777,6 +41842,12 @@ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.0.tgz", "integrity": "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==" }, + "@types/signal-exit": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/signal-exit/-/signal-exit-3.0.4.tgz", + "integrity": "sha512-e7EUPfU9afHyWc5CXtlqbvVHEshrb05uPlDCenWIbMgtWoFrTuTDVYNLKk6o4X2/4oHTfNqrJX/vaJ3uBhtXTg==", + "dev": true + }, "@types/sinon": { "version": "9.0.11", "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-9.0.11.tgz", diff --git a/packages/mongodb-downloader/package.json b/packages/mongodb-downloader/package.json index 2b1da795..2fbfb9da 100644 --- a/packages/mongodb-downloader/package.json +++ b/packages/mongodb-downloader/package.json @@ -53,10 +53,12 @@ }, "dependencies": { "debug": "^4.4.0", - "tar": "^6.1.15", "decompress": "^4.2.1", - "mongodb-download-url": "^1.6.3", - "node-fetch": "^2.7.0" + "node-fetch": "^2.7.0", + "promise-retry": "^2.0.1", + "signal-exit": "^4.1.0", + "tar": "^6.1.15", + "mongodb-download-url": "^1.6.3" }, "devDependencies": { "@mongodb-js/eslint-config-devtools": "0.9.12", @@ -67,6 +69,8 @@ "@types/decompress": "^4.2.4", "@types/mocha": "^9.1.1", "@types/node": "^22.15.30", + "@types/promise-retry": "^1.1.6", + "@types/signal-exit": "^3.0.4", "@types/tar": "^6.1.5", "depcheck": "^1.4.7", "eslint": "^7.25.0", diff --git a/packages/mongodb-downloader/src/download-integration.spec.ts b/packages/mongodb-downloader/src/download-integration.spec.ts new file mode 100644 index 00000000..eebd08e1 --- /dev/null +++ b/packages/mongodb-downloader/src/download-integration.spec.ts @@ -0,0 +1,126 @@ +import { expect } from 'chai'; +import { promises as fs } from 'fs'; +import path from 'path'; +import os from 'os'; +import { MongoDbDownloader } from './index'; + +describe('downloader with Locking', function () { + this.timeout(60000); + + let tmpDir: string; + + beforeEach(async function () { + tmpDir = path.join(os.tmpdir(), `download-integration-tests-${Date.now()}`); + await fs.mkdir(tmpDir, { 
recursive: true }); + }); + const version = '8.2.0'; + + afterEach(async function () { + try { + await fs.rm(tmpDir, { recursive: true }); + } catch { + // Ignore cleanup errors + } + }); + + it('should prevent concurrent downloads of the same version', async function () { + const downloader = new MongoDbDownloader({ tmpdir: tmpDir }); + + const results = await Promise.all([ + downloader.downloadMongoDbWithVersionInfo(version), + downloader.downloadMongoDbWithVersionInfo(version), + downloader.downloadMongoDbWithVersionInfo(version), + ]); + + // All results should be identical + expect(results[0].version).to.equal(version); + expect(results[1].version).to.equal(version); + expect(results[2].version).to.equal(version); + + expect(results[0].downloadedBinDir).to.equal(results[1].downloadedBinDir); + expect(results[1].downloadedBinDir).to.equal(results[2].downloadedBinDir); + + // Verify the downloaded directory exists and contains mongod + expect(await fs.stat(results[0].downloadedBinDir)).to.be.ok; + expect(await fs.stat(path.join(results[0].downloadedBinDir, 'mongod'))).to + .be.ok; + }); + + it('should wait for existing download to complete', async function () { + // First, download MongoDB normally + const downloader = new MongoDbDownloader({ tmpdir: tmpDir }); + const result = await downloader.downloadMongoDbWithVersionInfo(version); + + expect(result.version).to.equal(version); + expect(result.downloadedBinDir).to.be.a('string'); + + // Verify the downloaded directory exists and contains mongod + expect(await fs.stat(result.downloadedBinDir)).to.be.ok; + expect(await fs.stat(path.join(result.downloadedBinDir, 'mongod'))).to.be + .ok; + }); + + it('should skip download if already completed', async function () { + // First download + const downloader = new MongoDbDownloader({ tmpdir: tmpDir }); + const result1 = await downloader.downloadMongoDbWithVersionInfo(version); + + // Second download should use cached result + const result2 = await downloader.downloadMongoDbWithVersionInfo(version); + + expect(result1.version).to.equal(version); + expect(result2.version).to.equal(version); + expect(result1.downloadedBinDir).to.equal(result2.downloadedBinDir); + + // Verify the downloaded directory exists and contains mongod + expect(await fs.stat(result1.downloadedBinDir)).to.be.ok; + expect(await fs.stat(path.join(result1.downloadedBinDir, 'mongod'))).to.be + .ok; + }); + + it('should handle different versions independently', async function () { + const version2 = '8.1.0'; + + // Download different versions + const downloader = new MongoDbDownloader({ tmpdir: tmpDir }); + const [result1, result2] = await Promise.all([ + downloader.downloadMongoDbWithVersionInfo(version), + downloader.downloadMongoDbWithVersionInfo(version2), + ]); + + expect(result1.version).to.equal(version); + expect(result2.version).to.equal(version2); + expect(result1.downloadedBinDir).to.not.equal(result2.downloadedBinDir); + + // Verify both downloaded directories exist and contain mongod + expect(await fs.stat(result1.downloadedBinDir)).to.be.ok; + expect(await fs.stat(path.join(result1.downloadedBinDir, 'mongod'))).to.be + .ok; + expect(await fs.stat(result2.downloadedBinDir)).to.be.ok; + expect(await fs.stat(path.join(result2.downloadedBinDir, 'mongod'))).to.be + .ok; + }); + + it('should handle promise caching correctly', async function () { + const version = '8.2.0'; + + // Start multiple downloads in sequence (not parallel) + const downloader = new MongoDbDownloader({ tmpdir: tmpDir }); + const result1 = await 
downloader.downloadMongoDbWithVersionInfo(version); + const result2 = await downloader.downloadMongoDbWithVersionInfo(version); + const result3 = await downloader.downloadMongoDbWithVersionInfo(version); + + // All should return the same result + expect(result1.version).to.equal(version); + expect(result2.version).to.equal(version); + expect(result3.version).to.equal(version); + + expect(result1.downloadedBinDir).to.equal(result2.downloadedBinDir); + expect(result2.downloadedBinDir).to.equal(result3.downloadedBinDir); + + // Verify the downloaded directory exists and contains mongod + expect(await fs.stat(result1.downloadedBinDir)).to.be.ok; + expect(await fs.stat(path.join(result1.downloadedBinDir, 'mongod'))).to.be + .ok; + }); +}); diff --git a/packages/mongodb-downloader/src/index.ts b/packages/mongodb-downloader/src/index.ts index b554ed14..f48f3e81 100644 --- a/packages/mongodb-downloader/src/index.ts +++ b/packages/mongodb-downloader/src/index.ts @@ -12,7 +12,8 @@ import type { DownloadArtifactInfo, } from 'mongodb-download-url'; import createDebug from 'debug'; -const debug = createDebug('mongodb-downloader'); +import { withLock } from './npm-with-lock'; +export const debug = createDebug('mongodb-downloader'); export type { DownloadOptions }; @@ -20,144 +21,195 @@ export type DownloadResult = DownloadArtifactInfo & { downloadedBinDir: string; }; -// Download mongod + mongos and return the path to a directory containing them. -export async function downloadMongoDbWithVersionInfo( - tmpdir: string, - targetVersionSemverSpecifier = '*', - options: DownloadOptions = {}, -): Promise { - let wantsEnterprise = options.enterprise ?? false; - const isWindows = ['win32', 'windows'].includes( - options.platform ?? process.platform, - ); - async function lookupDownloadUrl(): Promise { +export class MongoDbDownloader { + private tmpdir: string; + + constructor({ tmpdir }: { tmpdir: string }) { + this.tmpdir = tmpdir; + } + + private downloadPromises: Record> = + Object.create(null); + + // Download mongod + mongos and return the path to a directory containing them. + async downloadMongoDbWithVersionInfo( + targetVersion = '*', + options: DownloadOptions = {}, + ): Promise { + await fs.mkdir(this.tmpdir, { recursive: true }); + if (targetVersion === 'latest-alpha') { + return await this.doDownload('latest-alpha', options); + } + + return await this.doDownload(targetVersion, options); + } + + private async lookupDownloadUrl({ + targetVersion, + enterprise, + options, + }: { + targetVersion: string; + enterprise: boolean; + options: DownloadOptions; + }): Promise { return await getDownloadURL({ - version: targetVersionSemverSpecifier, - enterprise: wantsEnterprise, + version: targetVersion, + enterprise, ...options, }); } - await fs.mkdir(tmpdir, { recursive: true }); - if (targetVersionSemverSpecifier === 'latest-alpha') { - return await doDownload( - tmpdir, - !!options.crypt_shared, - 'latest-alpha', - isWindows, - lookupDownloadUrl, + private async doDownload( + version: string, + options: DownloadOptions, + ): Promise { + const isWindows = ['win32', 'windows'].includes( + options.platform ?? process.platform, ); - } + const isCryptLibrary = !!options.crypt_shared; + const isEnterprise = options.enterprise ?? 
false; - if (/-enterprise$/.test(targetVersionSemverSpecifier)) { - wantsEnterprise = true; - targetVersionSemverSpecifier = targetVersionSemverSpecifier.replace( - /-enterprise$/, - '', + const downloadTarget = path.resolve( + this.tmpdir, + `mongodb-${process.platform}-${process.env.DISTRO_ID || 'none'}-${ + process.arch + }-${version}`.replace(/[^a-zA-Z0-9_-]/g, ''), ); - } + return (this.downloadPromises[downloadTarget] ??= (async () => { + const bindir = path.resolve( + downloadTarget, + isCryptLibrary && !isWindows ? 'lib' : 'bin', + ); - return await doDownload( - tmpdir, - !!options.crypt_shared, - targetVersionSemverSpecifier + - (wantsEnterprise ? '-enterprise' : '-community'), - isWindows, - () => lookupDownloadUrl(), - ); -} + const artifactInfoFile = path.join(bindir, '.artifact_info'); + const lockPath = `${downloadTarget}.lock`; -const downloadPromises: Record> = Object.create( - null, -); -async function doDownload( - tmpdir: string, - isCryptLibrary: boolean, - version: string, - isWindows: boolean, - lookupDownloadUrl: () => Promise, -): Promise { - const downloadTarget = path.resolve( - tmpdir, - `mongodb-${process.platform}-${process.env.DISTRO_ID || 'none'}-${ - process.arch - }-${version}`.replace(/[^a-zA-Z0-9_-]/g, ''), - ); - return (downloadPromises[downloadTarget] ??= (async () => { - const bindir = path.resolve( - downloadTarget, - isCryptLibrary && !isWindows ? 'lib' : 'bin', - ); - const artifactInfoFile = path.join(bindir, '.artifact_info'); + // Check if already downloaded before acquiring lock + const currentDownloadedFile = await this.getCurrentDownloadedFile({ + bindir, + artifactInfoFile, + }); + if (currentDownloadedFile) { + debug(`Skipping download because ${downloadTarget} exists`); + return currentDownloadedFile; + } + + // Acquire the lock and perform download + return await withLock(lockPath, async (signal) => { + // Check again inside lock in case another process downloaded it + const downloadedFile = await this.getCurrentDownloadedFile({ + bindir, + artifactInfoFile, + }); + if (downloadedFile) { + debug( + `Skipping download because ${downloadTarget} exists (checked inside lock)`, + ); + return downloadedFile; + } + + await fs.mkdir(downloadTarget, { recursive: true }); + const artifactInfo = await this.lookupDownloadUrl({ + targetVersion: version, + enterprise: isEnterprise, + options, + }); + const { url } = artifactInfo; + debug(`Downloading ${url} into ${downloadTarget}...`); + + await this.downloadAndExtract({ + url, + downloadTarget, + isCryptLibrary, + bindir, + }); + await fs.writeFile(artifactInfoFile, JSON.stringify(artifactInfo)); + debug(`Download complete`, bindir); + return { ...artifactInfo, downloadedBinDir: bindir }; + }); + })()); + } + + private async getCurrentDownloadedFile({ + bindir, + artifactInfoFile, + }: { + bindir: string; + artifactInfoFile: string; + }): Promise { try { await fs.stat(artifactInfoFile); - debug(`Skipping download because ${downloadTarget} exists`); return { ...JSON.parse(await fs.readFile(artifactInfoFile, 'utf8')), downloadedBinDir: bindir, }; } catch { - /* ignore */ + /* ignore - file doesn't exist, proceed with download */ + } + } + + // Using a large highWaterMark setting noticeably speeds up Windows downloads + private static HWM = 1024 * 1024; + + // eslint-disable-next-line no-inner-declarations + private async downloadAndExtract({ + withExtraStripDepth = 0, + downloadTarget, + isCryptLibrary, + bindir, + url, + }: { + withExtraStripDepth?: number; + downloadTarget: string; + isCryptLibrary: 
boolean; + bindir: string; + url: string; + }): Promise { + const response = await fetch(url, { + highWaterMark: MongoDbDownloader.HWM, + } as Parameters[1]); + if (/\.tgz$|\.tar(\.[^.]+)?$/.exec(url)) { + // the server's tarballs can contain hard links, which the (unmaintained?) + // `download` package is unable to handle (https://github.com/kevva/decompress/issues/93) + await promisify(pipeline)( + response.body, + tar.x({ cwd: downloadTarget, strip: isCryptLibrary ? 0 : 1 }), + ); + } else { + const filename = path.join( + downloadTarget, + path.basename(new URL(url).pathname), + ); + await promisify(pipeline)( + response.body, + createWriteStream(filename, { highWaterMark: MongoDbDownloader.HWM }), + ); + debug(`Written file ${url} to ${filename}, extracting...`); + await decompress(filename, downloadTarget, { + strip: isCryptLibrary ? 0 : 1, + filter: (file) => path.extname(file.path) !== '.pdb', // Windows .pdb files are huge and useless + }); } - await fs.mkdir(downloadTarget, { recursive: true }); - const artifactInfo = await lookupDownloadUrl(); - const { url } = artifactInfo; - debug(`Downloading ${url} into ${downloadTarget}...`); - - // Using a large highWaterMark setting noticeably speeds up Windows downloads - const HWM = 1024 * 1024; - async function downloadAndExtract(withExtraStripDepth = 0): Promise { - const response = await fetch(url, { - highWaterMark: HWM, - } as any); - if (/\.tgz$|\.tar(\.[^.]+)?$/.exec(url)) { - // the server's tarballs can contain hard links, which the (unmaintained?) - // `download` package is unable to handle (https://github.com/kevva/decompress/issues/93) - await promisify(pipeline)( - response.body, - tar.x({ cwd: downloadTarget, strip: isCryptLibrary ? 0 : 1 }), - ); - } else { - const filename = path.join( + try { + await fs.stat(bindir); // Make sure it exists. + } catch (err) { + if (withExtraStripDepth === 0 && url.includes('macos')) { + // The server team changed how macos release artifacts are packed + // and added a `./` prefix to paths in the tarball, + // which seems like it shouldn't change anything but does + // in fact require an increased path strip depth. + console.info('Retry due to miscalculated --strip-components depth'); + return await this.downloadAndExtract({ + withExtraStripDepth: 1, + url, downloadTarget, - path.basename(new URL(url).pathname), - ); - await promisify(pipeline)( - response.body, - createWriteStream(filename, { highWaterMark: HWM }), - ); - debug(`Written file ${url} to ${filename}, extracting...`); - await decompress(filename, downloadTarget, { - strip: isCryptLibrary ? 0 : 1, - filter: (file) => path.extname(file.path) !== '.pdb', // Windows .pdb files are huge and useless + isCryptLibrary, + bindir, }); } - - try { - await fs.stat(bindir); // Make sure it exists. - } catch (err) { - if (withExtraStripDepth === 0 && url.includes('macos')) { - // The server team changed how macos release artifacts are packed - // and added a `./` prefix to paths in the tarball, - // which seems like it shouldn't change anything but does - // in fact require an increased path strip depth. 
- console.info('Retry due to miscalculated --strip-components depth'); - return await downloadAndExtract(1); - } - throw err; - } + throw err; } - - await downloadAndExtract(); - await fs.writeFile(artifactInfoFile, JSON.stringify(artifactInfo)); - debug(`Download complete`, bindir); - return { ...artifactInfo, downloadedBinDir: bindir }; - })()); -} - -export async function downloadMongoDb( - ...args: Parameters -): Promise { - return (await downloadMongoDbWithVersionInfo(...args)).downloadedBinDir; + } } diff --git a/packages/mongodb-downloader/src/npm-with-lock.ts b/packages/mongodb-downloader/src/npm-with-lock.ts new file mode 100644 index 00000000..7f2afebb --- /dev/null +++ b/packages/mongodb-downloader/src/npm-with-lock.ts @@ -0,0 +1,406 @@ +// Adapted from: +// https://raw.githubusercontent.com/npm/cli/072253549d774893a3689341dbc660cb845ebcfe/workspaces/libnpmexec/lib/with-lock.js + +// The Artistic License 2.0 +// Copyright (c) 2000-2006, The Perl Foundation. +// +// Everyone is permitted to copy and distribute verbatim copies +// of this license document, but changing it is not allowed. +// +// Preamble +// +// This license establishes the terms under which a given free software +// Package may be copied, modified, distributed, and/or redistributed. +// The intent is that the Copyright Holder maintains some artistic +// control over the development of that Package while still keeping the +// Package available as open source and free software. + +// You are always permitted to make arrangements wholly outside of this +// license directly with the Copyright Holder of a given Package. If the +// terms of this license do not permit the full use that you propose to +// make of the Package, you should contact the Copyright Holder and seek +// a different licensing arrangement. +// +// Definitions +// +// "Copyright Holder" means the individual(s) or organization(s) +// named in the copyright notice for the entire Package. +// +// "Contributor" means any party that has contributed code or other +// material to the Package, in accordance with the Copyright Holder's +// procedures. +// +// "You" and "your" means any person who would like to copy, +// distribute, or modify the Package. +// +// "Package" means the collection of files distributed by the +// Copyright Holder, and derivatives of that collection and/or of +// those files. A given Package may consist of either the Standard +// Version, or a Modified Version. +// +// "Distribute" means providing a copy of the Package or making it +// accessible to anyone else, or in the case of a company or +// organization, to others outside of your company or organization. +// +// "Distributor Fee" means any fee that you charge for Distributing +// this Package or providing support for this Package to another +// party. It does not mean licensing fees. +// +// "Standard Version" refers to the Package if it has not been +// modified, or has been modified only in ways explicitly requested +// by the Copyright Holder. +// +// "Modified Version" means the Package, if it has been changed, and +// such changes were not explicitly requested by the Copyright +// Holder. +// +// "Original License" means this Artistic License as Distributed with +// the Standard Version of the Package, in its current version or as +// it may be modified by The Perl Foundation in the future. +// +// "Source" form means the source code, documentation source, and +// configuration files for the Package. 
+// +// "Compiled" form means the compiled bytecode, object code, binary, +// or any other form resulting from mechanical transformation or +// translation of the Source form. +// +// Permission for Use and Modification Without Distribution + +// (1) You are permitted to use the Standard Version and create and use +// Modified Versions for any purpose without restriction, provided that +// you do not Distribute the Modified Version. +// +// Permissions for Redistribution of the Standard Version + +// (2) You may Distribute verbatim copies of the Source form of the +// Standard Version of this Package in any medium without restriction, +// either gratis or for a Distributor Fee, provided that you duplicate +// all of the original copyright notices and associated disclaimers. At +// your discretion, such verbatim copies may or may not include a +// Compiled form of the Package. + +// (3) You may apply any bug fixes, portability changes, and other +// modifications made available from the Copyright Holder. The resulting +// Package will still be considered the Standard Version, and as such +// will be subject to the Original License. +// +// Distribution of Modified Versions of the Package as Source +// +// (4) You may Distribute your Modified Version as Source (either gratis +// or for a Distributor Fee, and with or without a Compiled form of the +// Modified Version) provided that you clearly document how it differs +// from the Standard Version, including, but not limited to, documenting +// any non-standard features, executables, or modules, and provided that +// you do at least ONE of the following: +// +// (a) make the Modified Version available to the Copyright Holder +// of the Standard Version, under the Original License, so that the +// Copyright Holder may include your modifications in the Standard +// Version. +// +// (b) ensure that installation of your Modified Version does not +// prevent the user installing or running the Standard Version. In +// addition, the Modified Version must bear a name that is different +// from the name of the Standard Version. +// +// (c) allow anyone who receives a copy of the Modified Version to +// make the Source form of the Modified Version available to others +// under + +// (i) the Original License or +// +// (ii) a license that permits the licensee to freely copy, +// modify and redistribute the Modified Version using the same +// licensing terms that apply to the copy that the licensee +// received, and requires that the Source form of the Modified +// Version, and of any works derived from it, be made freely +// available in that license fees are prohibited but Distributor +// Fees are allowed. +// +// Distribution of Compiled Forms of the Standard Version +// or Modified Versions without the Source +// +// (5) You may Distribute Compiled forms of the Standard Version without +// the Source, provided that you include complete instructions on how to +// get the Source of the Standard Version. Such instructions must be +// valid at the time of your distribution. If these instructions, at any +// time while you are carrying out such distribution, become invalid, you +// must provide new instructions on demand or cease further distribution. +// If you provide valid instructions or cease distribution within thirty +// days after you become aware that the instructions are invalid, then +// you do not forfeit any of your rights under this license. 
+// +// (6) You may Distribute a Modified Version in Compiled form without +// the Source, provided that you comply with Section 4 with respect to +// the Source of the Modified Version. +// +// Aggregating or Linking the Package + +// (7) You may aggregate the Package (either the Standard Version or +// Modified Version) with other packages and Distribute the resulting +// aggregation provided that you do not charge a licensing fee for the +// Package. Distributor Fees are permitted, and licensing fees for other +// components in the aggregation are permitted. The terms of this license +// apply to the use and Distribution of the Standard or Modified Versions +// as included in the aggregation. +// +// (8) You are permitted to link Modified and Standard Versions with +// other works, to embed the Package in a larger work of your own, or to +// build stand-alone binary or bytecode versions of applications that +// include the Package, and Distribute the result without restriction, +// provided the result does not expose a direct interface to the Package. +// +// Items That are Not Considered Part of a Modified Version + +// (9) Works (including, but not limited to, modules and scripts) that +// merely extend or make use of the Package, do not, by themselves, cause +// the Package to be a Modified Version. In addition, such works are not +// considered parts of the Package itself, and are not subject to the +// terms of this license. +// +// General Provisions + +// (10) Any use, modification, and distribution of the Standard or +// Modified Versions is governed by this Artistic License. By using, +// modifying or distributing the Package, you accept this license. Do not +// use, modify, or distribute the Package, if you do not accept this +// license. +// +// (11) If your Modified Version has been derived from a Modified +// Version made by someone other than you, you are nevertheless required +// to ensure that your Modified Version complies with the requirements of +// this license. +// +// (12) This license does not grant you the right to use any trademark, +// service mark, tradename, or logo of the Copyright Holder. +// +// (13) This license includes the non-exclusive, worldwide, +// free-of-charge patent license to make, have made, use, offer to sell, +// sell, import and otherwise transfer the Package with respect to any +// patent claims licensable by the Copyright Holder that are necessarily +// infringed by the Package. If you institute patent litigation +// (including a cross-claim or counterclaim) against any party alleging +// that the Package constitutes direct or contributory patent +// infringement, then this Artistic License to you shall terminate on the +// date that such litigation is filed. +// +// (14) Disclaimer of Warranty: +// THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS +// IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED +// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR +// NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL +// LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL +// BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +// DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF +// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
+import fs from 'node:fs/promises';
+import { rmdirSync } from 'node:fs';
+import promiseRetry from 'promise-retry';
+import { onExit } from 'signal-exit';
+
+// a lockfile implementation inspired by the unmaintained proper-lockfile library
+//
+// similarities:
+// - based on mkdir's atomicity
+// - works across processes and even machines (via NFS)
+// - cleans up after itself
+// - detects compromised locks
+//
+// differences:
+// - higher-level API (just a withLock function)
+// - written in async/await style
+// - uses mtime + inode for more reliable compromised lock detection
+// - more ergonomic compromised lock handling (i.e. withLock will reject, and callbacks have access to an AbortSignal)
+// - uses a more recent version of signal-exit
+
+const touchInterval = 1_000;
+// mtime precision is platform dependent, so use a reasonably large threshold
+const staleThreshold = 5_000;
+
+// track current locks and their cleanup functions
+const currentLocks = new Map<string, () => void>();
+
+function cleanupLocks() {
+  for (const [, cleanup] of currentLocks) {
+    try {
+      cleanup();
+    } catch (err) {
+      //
+    }
+  }
+}
+
+// clean up any locks that were not released normally
+onExit(cleanupLocks);
+
+/**
+ * Acquire an advisory lock for the given path and hold it for the duration of the callback.
+ *
+ * The lock will be released automatically when the callback resolves or rejects.
+ * Concurrent calls to withLock() for the same path will wait until the lock is released.
+ */
+export async function withLock<T>(
+  lockPath: string,
+  cb: (signal: AbortSignal) => Promise<T>,
+): Promise<T> {
+  try {
+    const signal = await acquireLock(lockPath);
+    return await new Promise<T>((resolve, reject) => {
+      signal.addEventListener('abort', () => {
+        reject(
+          Object.assign(new Error('Lock compromised'), {
+            code: 'ECOMPROMISED',
+          }),
+        );
+      });
+
+      void (async () => {
+        try {
+          resolve(await cb(signal));
+        } catch (err) {
+          reject(err);
+        }
+      })();
+    });
+  } finally {
+    releaseLock(lockPath);
+  }
+}
+
+function acquireLock(lockPath: string): Promise<AbortSignal> {
+  return promiseRetry(
+    {
+      minTimeout: 100,
+      maxTimeout: 5_000,
+      // if another process legitimately holds the lock, wait for it to release; if it dies abnormally and the lock becomes stale, we'll acquire it automatically
+      forever: true,
+    },
+    async (retry: (err: unknown) => never) => {
+      try {
+        await fs.mkdir(lockPath);
+      } catch (err: unknown) {
+        if (
+          err &&
+          typeof err === 'object' &&
+          'code' in err &&
+          err.code !== 'EEXIST' &&
+          err.code !== 'EBUSY' &&
+          err.code !== 'EPERM'
+        ) {
+          throw err;
+        }
+
+        const status = await getLockStatus(lockPath);
+
+        if (status === 'locked') {
+          // let's see if we can acquire it on the next attempt 🤞
+          return retry(err);
+        }
+        if (status === 'stale') {
+          try {
+            // there is a very tiny window where another process could also release the stale lock and acquire it before we release it here; the lock compromise checker should detect this and throw an error
+            deleteLock(lockPath);
+          } catch (e: unknown) {
+            // on windows, EBUSY/EPERM can happen if another process is (re)creating the lock; maybe we can acquire it on a subsequent attempt 🤞
+            if (
+              e &&
+              typeof e === 'object' &&
+              'code' in e &&
+              (e.code === 'EBUSY' || e.code === 'EPERM')
+            ) {
+              return retry(e);
+            }
+            throw e;
+          }
+        }
+        // immediately attempt to acquire the lock (no backoff)
+        return await acquireLock(lockPath);
+      }
+      try {
+        const signal = await maintainLock(lockPath);
+        return signal;
+      } catch (err) {
+        throw Object.assign(new Error('Lock compromised'), {
+          code: 'ECOMPROMISED',
+        });
+      }
+    },
+  );
+}
+
+function deleteLock(lockPath: string): void {
+  try {
+    // synchronous, so we can call in an exit handler
+    rmdirSync(lockPath);
+  } catch (err: unknown) {
+    if (
+      err &&
+      typeof err === 'object' &&
+      'code' in err &&
+      err.code !== 'ENOENT'
+    ) {
+      throw err;
+    }
+  }
+}
+
+function releaseLock(lockPath: string): void {
+  currentLocks.get(lockPath)?.();
+  currentLocks.delete(lockPath);
+}
+
+async function getLockStatus(
+  lockPath: string,
+): Promise<'locked' | 'stale' | 'unlocked'> {
+  try {
+    const stat = await fs.stat(lockPath);
+    return Date.now() - stat.mtimeMs > staleThreshold ? 'stale' : 'locked';
+  } catch (err: unknown) {
+    if (
+      err &&
+      typeof err === 'object' &&
+      'code' in err &&
+      err.code === 'ENOENT'
+    ) {
+      return 'unlocked';
+    }
+    throw err;
+  }
+}
+
+async function maintainLock(lockPath: string): Promise<AbortSignal> {
+  const controller = new AbortController();
+  const stats = await fs.stat(lockPath);
+  // fs.utimes operates on floating-point seconds (directly, or via strings/Date objects), which may not match the underlying filesystem's mtime precision, meaning that we might read a slightly different mtime than we write. always round to the nearest second, since all filesystems support at least second precision
+  let mtime = Math.round(stats.mtimeMs / 1000);
+  const signal = controller.signal;
+
+  async function touchLock() {
+    try {
+      const currentStats = await fs.stat(lockPath);
+      const currentMtime = Math.round(currentStats.mtimeMs / 1000);
+      if (currentStats.ino !== stats.ino || currentMtime !== mtime) {
+        throw new Error('Lock compromised');
+      }
+      mtime = Math.round(Date.now() / 1000);
+      // touch the lock, unless we just released it during this iteration
+      if (currentLocks.has(lockPath)) {
+        await fs.utimes(lockPath, mtime, mtime);
+      }
+    } catch (err: unknown) {
+      // stats mismatch or other fs error means the lock was compromised
+      controller.abort();
+    }
+  }
+
+  const timeout = setInterval(() => void touchLock(), touchInterval);
+  timeout.unref();
+  function cleanup() {
+    clearInterval(timeout);
+    deleteLock(lockPath);
+  }
+  currentLocks.set(lockPath, cleanup);
+  return signal;
+}
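
For reference, a minimal usage sketch of the withLock helper added above, assuming a caller inside the same package; the relative import path is illustrative, and doExpensiveWork is a hypothetical placeholder for any task that must not run concurrently across processes:

import path from 'path';
import os from 'os';
import { withLock } from './npm-with-lock';

// Hypothetical task that should only run in one process at a time,
// e.g. extracting an archive into a shared directory.
async function doExpensiveWork(): Promise<string> {
  return 'done';
}

async function runExclusively(): Promise<string> {
  const lockPath = path.join(os.tmpdir(), 'my-task.lock');
  return await withLock(lockPath, async (signal) => {
    // `signal` aborts if the lock is detected as compromised (e.g. another
    // process removed a stale lock directory and re-acquired it); long-running
    // work can poll signal.aborted or hand the signal to abortable APIs
    // before touching shared state.
    if (signal.aborted) {
      throw new Error('lock was compromised');
    }
    return await doExpensiveWork();
  });
}

Concurrent runExclusively() calls in separate processes serialize on the lock directory; if a process dies without releasing it, the stale lock is reclaimed once it exceeds the staleness threshold.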
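Similarly, a sketch of the new class-based downloader API from src/index.ts, mirroring what the integration spec above exercises; the relative import path and temp directory name are illustrative:

import path from 'path';
import os from 'os';
import { MongoDbDownloader } from './index';

async function downloadOnce(): Promise<void> {
  const downloader = new MongoDbDownloader({
    tmpdir: path.join(os.tmpdir(), 'mongodb-downloads'),
  });

  // Concurrent requests for the same version share one in-process promise,
  // and the on-disk lock serializes the actual download across processes.
  const [a, b] = await Promise.all([
    downloader.downloadMongoDbWithVersionInfo('8.2.0'),
    downloader.downloadMongoDbWithVersionInfo('8.2.0'),
  ]);

  console.log(a.downloadedBinDir === b.downloadedBinDir); // true
  console.log(path.join(a.downloadedBinDir, 'mongod'));
}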