Skip to content
This repository was archived by the owner on Oct 14, 2025. It is now read-only.

Commit 8057a78

Browse files
authored
Merge pull request #5 from useblacksmith/bump-cache-version-3.2.158-20241004195445
Bump @actions/cache version to 3.2.158
2 parents a749790 + 86f6ba1 commit 8057a78

File tree

4 files changed

+251
-95
lines changed

4 files changed

+251
-95
lines changed

dist/cache-save/index.js

Lines changed: 123 additions & 45 deletions
Original file line number · Diff line number · Diff line change
@@ -96,10 +96,11 @@ const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, voi
9696
});
9797
});
9898
function reportFailure() {
99+
var _a, _b;
99100
return __awaiter(this, void 0, void 0, function* () {
100101
try {
101102
core.info('Reporting failure to api.blacksmith.sh');
102-
const message = `${process.env.GITHUB_JOB} failed for ${process.env.GITHUB_REPOSITORY} with run ID: ${process.env.GITHUB_RUN_ID}; Sender: ${process.env.GITHUB_TRIGGERING_ACTOR}`;
103+
const message = `${process.env.GITHUB_JOB} failed for ${process.env.GITHUB_REPOSITORY} with run ID: ${process.env.GITHUB_RUN_ID}; Sender: ${process.env.GITHUB_TRIGGERING_ACTOR}; VM ID: ${(_a = process.env.VM_ID) !== null && _a !== void 0 ? _a : 'unknown'}; petname: ${(_b = process.env.PETNAME) !== null && _b !== void 0 ? _b : 'unknown'}`;
103104
const httpClient = (0, cacheHttpClient_1.createHttpClient)();
104105
yield promiseWithTimeout(10000, httpClient.postJson((0, cacheHttpClient_1.getCacheApiUrl)('report-failed'), {
105106
message
@@ -181,9 +182,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
181182
finally {
182183
// Try to delete the archive to save space
183184
try {
184-
const before = Date.now();
185185
yield unlinkWithTimeout(archivePath, 5000);
186-
core.info(`cleaning up archive took ${Date.now() - before}ms`);
187186
}
188187
catch (error) {
189188
core.debug(`Failed to delete archive: ${error}`);
@@ -336,6 +335,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
336335
step((generator = generator.apply(thisArg, _arguments || [])).next());
337336
});
338337
};
338+
var __importDefault = (this && this.__importDefault) || function (mod) {
339+
return (mod && mod.__esModule) ? mod : { "default": mod };
340+
};
339341
Object.defineProperty(exports, "__esModule", ({ value: true }));
340342
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = exports.createHttpClient = exports.getCacheApiUrl = void 0;
341343
const core = __importStar(__nccwpck_require__(2186));
@@ -348,14 +350,20 @@ const utils = __importStar(__nccwpck_require__(1518));
348350
const downloadUtils_1 = __nccwpck_require__(5500);
349351
const options_1 = __nccwpck_require__(6215);
350352
const requestUtils_1 = __nccwpck_require__(3981);
353+
const axios_1 = __importDefault(__nccwpck_require__(8757));
351354
const versionSalt = '1.0';
352355
function getCacheApiUrl(resource) {
353-
const baseUrl = process.env['BLACKSMITH_CACHE_URL'] || 'https://api.blacksmith.sh/cache';
356+
var _a, _b;
357+
let baseUrl = process.env.BLACKSMITH_CACHE_URL;
354358
if (!baseUrl) {
355-
throw new Error('Cache Service Url not found, unable to restore cache.');
359+
baseUrl = ((_a = process.env.PETNAME) === null || _a === void 0 ? void 0 : _a.includes('staging'))
360+
? 'https://stagingapi.blacksmith.sh/cache'
361+
: 'https://api.blacksmith.sh/cache';
356362
}
357363
const url = `${baseUrl}/${resource}`;
358-
core.debug(`Blacksmith cache resource URL: ${url}; version: 3.2.40`);
364+
if ((_b = process.env.PETNAME) === null || _b === void 0 ? void 0 : _b.includes('staging')) {
365+
core.info(`Using staging API: ${url}`);
366+
}
359367
return url;
360368
}
361369
exports.getCacheApiUrl = getCacheApiUrl;
@@ -396,31 +404,65 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
396404
exports.getCacheVersion = getCacheVersion;
397405
function getCacheEntry(keys, paths, options) {
398406
return __awaiter(this, void 0, void 0, function* () {
399-
const httpClient = createHttpClient();
400407
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
401408
const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
402-
const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
403-
// Cache not found
404-
if (response.statusCode === 204) {
405-
// List cache for primary key only if cache miss occurs
406-
if (core.isDebug()) {
407-
yield printCachesListForDiagnostics(keys[0], httpClient, version);
409+
const maxRetries = 2;
410+
let retries = 0;
411+
core.info(`Checking cache for keys ${keys.join(',')} and version ${version}`);
412+
while (retries <= maxRetries) {
413+
try {
414+
const before = Date.now();
415+
const response = yield axios_1.default.get(getCacheApiUrl(resource), {
416+
headers: {
417+
Accept: createAcceptHeader('application/json', '6.0-preview.1'),
418+
'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME'],
419+
Authorization: `Bearer ${process.env['BLACKSMITH_CACHE_TOKEN']}`
420+
},
421+
timeout: 10000 // 10 seconds timeout
422+
});
423+
core.debug(`Cache lookup took ${Date.now() - before}ms`);
424+
// Cache not found
425+
if (response.status === 204) {
426+
// List cache for primary key only if cache miss occurs
427+
if (core.isDebug()) {
428+
yield printCachesListForDiagnostics(keys[0], createHttpClient(), version);
429+
}
430+
return null;
431+
}
432+
if (response.status < 200 || response.status >= 300) {
433+
throw new Error(`Cache service responded with ${response.status}`);
434+
}
435+
const cacheResult = response.data;
436+
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
437+
if (!cacheDownloadUrl) {
438+
// Cache archiveLocation not found. This should never happen, and hence bail out.
439+
throw new Error('Cache not found.');
440+
}
441+
core.setSecret(cacheDownloadUrl);
442+
core.debug(`Cache Result:`);
443+
core.debug(JSON.stringify(cacheResult));
444+
return cacheResult;
445+
}
446+
catch (error) {
447+
if (error.response &&
448+
error.response.status >= 500 &&
449+
retries < maxRetries) {
450+
retries++;
451+
core.warning(`Retrying due to server error (attempt ${retries} of ${maxRetries})`);
452+
continue;
453+
}
454+
if (error.response) {
455+
throw new Error(`Cache service responded with ${error.response.status}`);
456+
}
457+
else if (error.code === 'ECONNABORTED') {
458+
throw new Error('Request timed out after 10 seconds');
459+
}
460+
else {
461+
throw error;
462+
}
408463
}
409-
return null;
410-
}
411-
if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) {
412-
throw new Error(`Cache service responded with ${response.statusCode}`);
413-
}
414-
const cacheResult = response.result;
415-
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
416-
if (!cacheDownloadUrl) {
417-
// Cache achiveLocation not found. This should never happen, and hence bail out.
418-
throw new Error('Cache not found.');
419464
}
420-
core.setSecret(cacheDownloadUrl);
421-
core.debug(`Cache Result:`);
422-
core.debug(JSON.stringify(cacheResult));
423-
return cacheResult;
465+
throw new Error(`Failed to get cache entry after ${maxRetries} retries`);
424466
});
425467
}
426468
exports.getCacheEntry = getCacheEntry;
@@ -1064,12 +1106,25 @@ function downloadCacheAxiosMultiPart(archiveLocation, archivePath) {
10641106
});
10651107
try {
10661108
core.debug(`Downloading from ${archiveLocation} to ${archivePath}`);
1067-
const metadataResponse = yield axios_1.default.get(archiveLocation, {
1068-
headers: { Range: 'bytes=0-1' }
1069-
});
1070-
const contentRangeHeader = metadataResponse.headers['content-range'];
1109+
let metadataResponse;
1110+
let contentRangeHeader;
1111+
let retries = 0;
1112+
const maxRetries = 2;
1113+
while (retries <= maxRetries) {
1114+
metadataResponse = yield axios_1.default.get(archiveLocation, {
1115+
headers: { Range: 'bytes=0-1' }
1116+
});
1117+
contentRangeHeader = metadataResponse.headers['content-range'];
1118+
if (contentRangeHeader) {
1119+
break;
1120+
}
1121+
retries++;
1122+
if (retries <= maxRetries) {
1123+
core.debug(`Content-Range header not found. Retrying (${retries}/${maxRetries})...`);
1124+
}
1125+
}
10711126
if (!contentRangeHeader) {
1072-
throw new Error('Content-Range is not defined; unable to determine file size');
1127+
throw new Error('Content-Range is not defined after retries; unable to determine file size');
10731128
}
10741129
// Parse the total file size from the Content-Range header
10751130
const fileSize = parseInt(contentRangeHeader.split('/')[1]);
@@ -1247,7 +1302,6 @@ exports.downloadCacheHttpClient = downloadCacheHttpClient;
12471302
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
12481303
var _a;
12491304
return __awaiter(this, void 0, void 0, function* () {
1250-
core.info('Downloading from cache using Blacksmith Actions http-client');
12511305
const archiveDescriptor = yield fs.promises.open(archivePath, 'w+');
12521306
// Set file permissions so that other users can untar the cache
12531307
yield archiveDescriptor.chmod(0o644);
@@ -1262,19 +1316,34 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options
12621316
}, 300000);
12631317
stallTimeout.unref(); // Don't keep the process alive if the download is stalled.
12641318
try {
1265-
const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
1266-
return httpClient.get(archiveLocation, {
1267-
Range: 'bytes=0-1'
1319+
let metadataResponse;
1320+
let contentRangeHeader;
1321+
let retries = 0;
1322+
const maxRetries = 2;
1323+
while (retries <= maxRetries) {
1324+
metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
1325+
return httpClient.get(archiveLocation, {
1326+
Range: 'bytes=0-1'
1327+
});
1328+
}));
1329+
// Abort download if no traffic received over the socket.
1330+
metadataResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
1331+
metadataResponse.message.destroy();
1332+
core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
12681333
});
1269-
}));
1270-
// Abort download if no traffic received over the socket.
1271-
metadataResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
1272-
metadataResponse.message.destroy();
1273-
core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
1274-
});
1275-
const contentRangeHeader = metadataResponse.message.headers['content-range'];
1334+
contentRangeHeader = metadataResponse.message.headers['content-range'];
1335+
if (contentRangeHeader) {
1336+
break;
1337+
}
1338+
retries++;
1339+
if (retries <= maxRetries) {
1340+
core.debug(`Content-Range header not found. Retrying (${retries}/${maxRetries})...`);
1341+
}
1342+
}
12761343
if (!contentRangeHeader) {
1277-
throw new Error('Content-Range is not defined; unable to determine file size');
1344+
const headers = JSON.stringify(metadataResponse.message.headers);
1345+
const statusCode = metadataResponse.message.statusCode;
1346+
throw new Error(`Content-Range is not defined; unable to determine file size; Headers: ${headers}; Status Code: ${statusCode}`);
12781347
}
12791348
// Parse the total file size from the Content-Range header
12801349
const length = parseInt(contentRangeHeader.split('/')[1]);
@@ -1559,9 +1628,12 @@ function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRet
15591628
isRetryable = isRetryableStatusCode(statusCode);
15601629
errorMessage = `Cache service responded with ${statusCode}`;
15611630
}
1631+
if (!statusCode) {
1632+
isRetryable = true;
1633+
}
15621634
core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
15631635
if (!isRetryable) {
1564-
core.debug(`${name} - Error is not retryable`);
1636+
core.warning(`${name} - Error is not retryable; Status Code: ${statusCode}; Error: ${errorMessage}`);
15651637
break;
15661638
}
15671639
yield sleep(delay);
@@ -1690,11 +1762,14 @@ function getTarPath() {
16901762
}
16911763
// Return arguments for tar as per tarPath, compressionMethod, method type and os
16921764
function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
1765+
var _a, _b;
16931766
return __awaiter(this, void 0, void 0, function* () {
16941767
const args = [`"${tarPath.path}"`];
16951768
const cacheFileName = utils.getCacheFileName(compressionMethod);
16961769
const tarFile = 'cache.tar';
16971770
const workingDirectory = getWorkingDirectory();
1771+
const shouldSkipOldFiles = ((_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.includes('muzzapp')) ||
1772+
((_b = process.env['GITHUB_REPOSITORY']) === null || _b === void 0 ? void 0 : _b.includes('FastActions'));
16981773
// Speficic args for BSD tar on windows for workaround
16991774
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
17001775
compressionMethod !== constants_1.CompressionMethod.Gzip &&
@@ -1712,6 +1787,9 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
17121787
args.push('-xf', BSD_TAR_ZSTD
17131788
? tarFile
17141789
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
1790+
if (shouldSkipOldFiles) {
1791+
args.push('--skip-old-files');
1792+
}
17151793
break;
17161794
case 'list':
17171795
args.push('-tf', BSD_TAR_ZSTD

0 commit comments

Comments (0)