Skip to content

Commit

Permalink
Merge pull request #8 from useblacksmith/bump-version-region
Browse files Browse the repository at this point in the history
*: bump cache to 3.2.213
  • Loading branch information
aayushshah15 authored Oct 31, 2024
2 parents 6ff6225 + 56c1200 commit 65c6ca8
Show file tree
Hide file tree
Showing 4 changed files with 425 additions and 77 deletions.
246 changes: 210 additions & 36 deletions dist/cache-save/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ function reportFailure() {
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d;
return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths);
restoreKeys = restoreKeys || [];
Expand All @@ -135,47 +136,76 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
checkKey(key);
}
const compressionMethod = yield utils.getCompressionMethod();
let archivePath = '';
// TODO(aayush): Clean this up.
let archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
const useCacheManager = false;
let cacheEntry = null;
let cacheKey = undefined;
try {
// path are needed to compute version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod,
enableCrossOsArchive
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
if (useCacheManager) {
const cacheEntry = yield cacheHttpClient.getCacheEntryUsingCacheMgr(keys, paths, archivePath, {
compressionMethod,
enableCrossOsArchive
});
if (!cacheEntry) {
core.info('Did not get a cache hit; proceeding as an uncached run');
return undefined;
}
// await cacheHttpClient.downloadBlobUsingCacheMgr(cacheEntry.cacheId)
archivePath = path.join('/home/runner/blacksmith', cacheEntry.cacheId);
yield cacheHttpClient.mountSharedNFSVolume();
yield cacheHttpClient.waitForArchiveToBeAvailable(cacheEntry.cacheId);
cacheKey = cacheEntry.cacheKey;
}
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
core.info('Lookup only - skipping download');
return cacheEntry.cacheKey;
else {
// path are needed to compute version
cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod,
enableCrossOsArchive
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
cacheKey = cacheEntry.cacheKey;
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
core.info('Lookup only - skipping download');
return cacheEntry.cacheKey;
}
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
const extractStartTime = Date.now();
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
const extractEndTime = Date.now();
const extractionTimeSeconds = (extractEndTime - extractStartTime) / 1000;
core.info(`Cache extraction completed in ${extractionTimeSeconds.toFixed(2)} seconds`);
core.info('Cache restored successfully');
return cacheEntry.cacheKey;
return cacheKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Supress all non-validation cache related errors because caching should be optional
if (error.message.includes(`Cache service responded with 404`)) {
// Suppress all non-validation cache related errors because caching should be optional
if ((_a = typedError.message) === null || _a === void 0 ? void 0 : _a.includes(`Cache service responded with 404`)) {
core.info(`Did not get a cache hit; proceeding as an uncached run`);
}
else {
core.warning(`Failed to restore: ${error.message}`);
yield reportFailure();
core.warning(`Failed to restore: ${typedError.message}`);
if (!((_b = typedError.message) === null || _b === void 0 ? void 0 : _b.includes('File exists')) &&
!((_c = typedError.message) === null || _c === void 0 ? void 0 : _c.includes('Operation not permitted')) &&
!((_d = typedError.message) === null || _d === void 0 ? void 0 : _d.includes('failed with exit code 2'))) {
yield reportFailure();
}
}
}
}
Expand Down Expand Up @@ -335,11 +365,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = exports.createHttpClient = exports.getCacheApiUrl = void 0;
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheEntryUsingCacheMgr = exports.downloadBlobUsingCacheMgr = exports.waitForArchiveToBeAvailable = exports.mountSharedNFSVolume = exports.getCacheVersion = exports.createHttpClient = exports.getCacheApiUrl = void 0;
const core = __importStar(__nccwpck_require__(2186));
const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(5526);
Expand All @@ -350,7 +377,8 @@ const utils = __importStar(__nccwpck_require__(1518));
const downloadUtils_1 = __nccwpck_require__(5500);
const options_1 = __nccwpck_require__(6215);
const requestUtils_1 = __nccwpck_require__(3981);
const axios_1 = __importDefault(__nccwpck_require__(8757));
const axios_1 = __importStar(__nccwpck_require__(8757));
const child_process_1 = __nccwpck_require__(2081);
const versionSalt = '1.0';
function getCacheApiUrl(resource) {
var _a, _b;
Expand Down Expand Up @@ -402,11 +430,147 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
// Mounts the host's shared Blacksmith cache NFS export into the VM at a
// fixed mount point, creating that directory first if needed.
// Logs and rethrows any failure from the mount command.
function mountSharedNFSVolume() {
    return __awaiter(this, void 0, void 0, function* () {
        const mountPoint = '/home/runner/blacksmith';
        // recursive:true makes this a no-op when the directory already exists.
        fs.mkdirSync(mountPoint, { recursive: true });
        // nconnect=16: multiple TCP connections to the NFS server for throughput.
        const cmd = `sudo mount -t nfs -o nconnect=16 192.168.127.1:/blacksmith/cache ${mountPoint}`;
        try {
            (0, child_process_1.execSync)(cmd);
            core.info(`NFS volume mounted successfully at ${mountPoint}`);
        }
        catch (error) {
            core.error(`Failed to mount NFS volume: ${error}`);
            throw error;
        }
    });
}
exports.mountSharedNFSVolume = mountSharedNFSVolume;
// Polls the shared NFS mount until the archive for `cacheId` is fully
// written, signalled by the writer creating a `<cacheId>_done` sentinel
// file next to it. Throws if the sentinel does not appear within 2 minutes.
function waitForArchiveToBeAvailable(cacheId) {
    return __awaiter(this, void 0, void 0, function* () {
        const doneFilePath = `/home/runner/blacksmith/${cacheId}_done`;
        const deadline = Date.now() + 2 * 60 * 1000; // 2-minute budget
        for (;;) {
            if (fs.existsSync(doneFilePath)) {
                return;
            }
            if (Date.now() > deadline) {
                throw new Error(`Timeout waiting for ${doneFilePath} to appear`);
            }
            // The sentinel is written by an external process; re-check once a second.
            yield new Promise(resolve => setTimeout(resolve, 1000));
        }
    });
}
exports.waitForArchiveToBeAvailable = waitForArchiveToBeAvailable;
// Streams a cache blob from the host-side cache manager into the VM at
// /home/runner/blacksmith/<cacheId>, logging transfer time and throughput.
// @param cacheId id of the cache entry to fetch from the cache manager
// @returns a promise that resolves once the file is fully written
// @throws Error if the download request cannot be started, or rejects the
//         returned promise on a write/stream failure mid-transfer
function downloadBlobUsingCacheMgr(cacheId) {
    return __awaiter(this, void 0, void 0, function* () {
        const archiveDir = '/home/runner/blacksmith';
        if (!fs.existsSync(archiveDir)) {
            fs.mkdirSync(archiveDir, { recursive: true });
        }
        const archivePath = `${archiveDir}/${cacheId}`;
        // Create a file to write the cache to.
        fs.writeFileSync(archivePath, '');
        const cacheManagerEndpoint = `http://192.168.127.1:5555/cache/${cacheId}/download`;
        try {
            const before = Date.now();
            core.info(`Transferring blob from the host into the VM to ${archivePath}`);
            const response = yield (0, axios_1.default)({
                method: 'get',
                url: cacheManagerEndpoint,
                responseType: 'stream'
            });
            const writer = fs.createWriteStream(archivePath);
            response.data.pipe(writer);
            return new Promise((resolve, reject) => {
                writer.on('finish', () => {
                    const duration = (Date.now() - before) / 1000;
                    const fileSizeInBytes = fs.statSync(archivePath).size;
                    const speedMBps = fileSizeInBytes / (1024 * 1024) / duration;
                    core.info(`Blob transfer completed in ${duration.toFixed(2)}s (${speedMBps.toFixed(2)} MB/s)`);
                    resolve();
                });
                // BUGFIX: pipe() does not forward source errors to the
                // destination; without this handler an error on the HTTP
                // response stream would leave this promise pending forever.
                response.data.on('error', (err) => {
                    writer.destroy();
                    reject(err);
                });
                writer.on('error', reject);
            });
        }
        catch (error) {
            throw new Error(`Failed to download cache blob: ${error.message}`);
        }
    });
}
exports.downloadBlobUsingCacheMgr = downloadBlobUsingCacheMgr;
// getCacheEntryUsingCacheMgr is used to get the cache entry from the cache manager.
// It asks the cache manager to check whether the cache key exists and if it does, it waits
// until the cache manager reports that the cache key is ready to be downloaded.
// @param keys ordered list of cache keys to probe (primary first, then restore keys)
// @param paths the paths being cached; combined with options into the cache version hash
// @param destinationPath where the cache manager should materialize the archive
// @param options compression method / cross-OS flags that feed the version hash
// @returns the cache-manager entry on a hit, or undefined on a miss/failure/timeout
// @throws on unexpected HTTP errors from the cache manager
function getCacheEntryUsingCacheMgr(keys, paths, destinationPath, options) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
        const cacheManagerEndpoint = 'http://192.168.127.1:5555/cache';
        const formData = new URLSearchParams({
            keys: keys.join(','),
            version,
            destinationPath
        });
        let response;
        try {
            response = yield axios_1.default.post(cacheManagerEndpoint, formData, {
                headers: {
                    'Content-Type': 'application/x-www-form-urlencoded',
                    Authorization: `Bearer ${process.env['BLACKSMITH_CACHE_TOKEN']}`,
                    'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME'] || ''
                },
                timeout: 10000 // 10 seconds timeout
            });
        }
        catch (error) {
            // A 404 from the cache manager is a plain miss, not a failure.
            if ((0, axios_1.isAxiosError)(error) && ((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {
                return undefined;
            }
            throw error;
        }
        if (response.status !== 200) {
            throw new Error(`Cache service responded with ${response.status}`);
        }
        const result = response.data;
        if (result.restoreStatus === 'miss') {
            return undefined; // Cache not found, nothing to download
        }
        if (result.restoreStatus === 'done') {
            core.info(`Blob found in cache manager; proceeding to direct transfer`);
            return result;
        }
        // If the restoreStatus is in_progress, we loop around until it's done or failed.
        if (result.restoreStatus === 'in_progress') {
            const startTime = Date.now();
            const maxWaitTime = 5 * 60 * 1000; // 5 minutes in milliseconds
            while (Date.now() - startTime < maxWaitTime) {
                yield new Promise(resolve => setTimeout(resolve, 1000));
                // BUGFIX: without a per-request timeout a hung poll would stall
                // indefinitely — the 5-minute cap is only re-checked between awaits.
                response = yield axios_1.default.get(`${cacheManagerEndpoint}/${result.cacheId}`, {
                    timeout: 10000 // 10 seconds timeout
                });
                const status = response.data.status;
                if (status === 'done') {
                    return result;
                }
                if (status === 'failed') {
                    core.warning(`Cache restore failed`);
                    return undefined;
                }
            }
            core.warning(`Cache restore timed out after 5 minutes`);
            return undefined;
        }
        return result;
    });
}
exports.getCacheEntryUsingCacheMgr = getCacheEntryUsingCacheMgr;
function getCacheEntry(keys, paths, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const maxRetries = 2;
const maxRetries = 3;
let retries = 0;
core.info(`Checking cache for keys ${keys.join(',')} and version ${version}`);
while (retries <= maxRetries) {
Expand All @@ -416,9 +580,10 @@ function getCacheEntry(keys, paths, options) {
headers: {
Accept: createAcceptHeader('application/json', '6.0-preview.1'),
'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME'],
Authorization: `Bearer ${process.env['BLACKSMITH_CACHE_TOKEN']}`
Authorization: `Bearer ${process.env['BLACKSMITH_CACHE_TOKEN']}`,
'X-Cache-Region': (_a = process.env['BLACKSMITH_REGION']) !== null && _a !== void 0 ? _a : 'eu-central'
},
timeout: 10000 // 10 seconds timeout
timeout: 3000 // 3 seconds timeout
});
core.debug(`Cache lookup took ${Date.now() - before}ms`);
// Cache not found
Expand All @@ -444,18 +609,24 @@ function getCacheEntry(keys, paths, options) {
return cacheResult;
}
catch (error) {
if (error.response &&
error.response.status >= 500 &&
retries < maxRetries) {
if ((error.response && error.response.status >= 500) ||
error.code === 'ECONNABORTED') {
retries++;
core.warning(`Retrying due to server error (attempt ${retries} of ${maxRetries})`);
continue;
if (retries <= maxRetries) {
if (error.code === 'ECONNABORTED') {
core.warning(`Request timed out. Retrying (attempt ${retries} of ${maxRetries})`);
}
else {
core.warning(`Retrying due to error: ${error.message} (attempt ${retries} of ${maxRetries})`);
}
continue;
}
}
if (error.response) {
throw new Error(`Cache service responded with ${error.response.status}`);
}
else if (error.code === 'ECONNABORTED') {
throw new Error('Request timed out after 10 seconds');
throw new Error('Request timed out after 3 seconds');
}
else {
throw error;
Expand Down Expand Up @@ -518,7 +689,10 @@ function reserveCache(key, paths, options) {
cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
};
const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
var _a;
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest, {
'X-Cache-Region': (_a = process.env['BLACKSMITH_REGION']) !== null && _a !== void 0 ? _a : 'eu-central'
});
}));
return response;
});
Expand Down
Loading

0 comments on commit 65c6ca8

Please sign in to comment.