Skip to content

Commit f382193

Browse files
authored
Update @actions/cache version to 1.0.8 (#283)
1 parent 3ef38b8 commit f382193

File tree

7 files changed

+103
-65
lines changed

7 files changed

+103
-65
lines changed

.licenses/npm/@actions/cache.dep.yml

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.licenses/npm/@azure/core-http.dep.yml

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.licenses/npm/node-fetch.dep.yml

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

dist/cache-save/index.js

+45-26
Original file line numberDiff line numberDiff line change
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
10411041
});
10421042
}
10431043
exports.createTempDirectory = createTempDirectory;
1044-
function getArchiveFileSizeIsBytes(filePath) {
1044+
function getArchiveFileSizeInBytes(filePath) {
10451045
return fs.statSync(filePath).size;
10461046
}
1047-
exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
1047+
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
10481048
function resolvePaths(patterns) {
10491049
var e_1, _a;
10501050
var _b;
@@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
38523852
function uploadFile(httpClient, cacheId, archivePath, options) {
38533853
return __awaiter(this, void 0, void 0, function* () {
38543854
// Upload Chunks
3855-
const fileSize = fs.statSync(archivePath).size;
3855+
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
38563856
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
38573857
const fd = fs.openSync(archivePath, 'r');
38583858
const uploadOptions = options_1.getUploadOptions(options);
@@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
39023902
yield uploadFile(httpClient, cacheId, archivePath, options);
39033903
// Commit Cache
39043904
core.debug('Commiting cache');
3905-
const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
3905+
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
39063906
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
39073907
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
39083908
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
58775877
const contentLengthHeader = downloadResponse.message.headers['content-length'];
58785878
if (contentLengthHeader) {
58795879
const expectedLength = parseInt(contentLengthHeader);
5880-
const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
5880+
const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
58815881
if (actualLength !== expectedLength) {
58825882
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
58835883
}
@@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
3432234322
});
3432334323

3432434324
const INTERNALS$2 = Symbol('Request internals');
34325-
const URL = whatwgUrl.URL;
34325+
const URL = Url.URL || whatwgUrl.URL;
3432634326

3432734327
// fix an issue where "format", "parse" aren't a named export for node <10
3432834328
const parse_url = Url.parse;
@@ -41451,7 +41451,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
4145141451
if (core.isDebug()) {
4145241452
yield tar_1.listTar(archivePath, compressionMethod);
4145341453
}
41454-
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
41454+
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
4145541455
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
4145641456
yield tar_1.extractTar(archivePath, compressionMethod);
4145741457
core.info('Cache restored successfully');
@@ -41496,18 +41496,29 @@ function saveCache(paths, key, options) {
4149641496
const archiveFolder = yield utils.createTempDirectory();
4149741497
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
4149841498
core.debug(`Archive Path: ${archivePath}`);
41499-
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
41500-
if (core.isDebug()) {
41501-
yield tar_1.listTar(archivePath, compressionMethod);
41502-
}
41503-
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
41504-
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
41505-
core.debug(`File Size: ${archiveFileSize}`);
41506-
if (archiveFileSize > fileSizeLimit) {
41507-
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
41508-
}
41509-
core.debug(`Saving Cache (ID: ${cacheId})`);
41510-
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
41499+
try {
41500+
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
41501+
if (core.isDebug()) {
41502+
yield tar_1.listTar(archivePath, compressionMethod);
41503+
}
41504+
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
41505+
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
41506+
core.debug(`File Size: ${archiveFileSize}`);
41507+
if (archiveFileSize > fileSizeLimit) {
41508+
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
41509+
}
41510+
core.debug(`Saving Cache (ID: ${cacheId})`);
41511+
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
41512+
}
41513+
finally {
41514+
// Try to delete the archive to save space
41515+
try {
41516+
yield utils.unlinkFile(archivePath);
41517+
}
41518+
catch (error) {
41519+
core.debug(`Failed to delete archive: ${error}`);
41520+
}
41521+
}
4151141522
return cacheId;
4151241523
});
4151341524
}
@@ -53218,7 +53229,12 @@ class HttpHeaders {
5321853229
* Create a deep clone/copy of this HttpHeaders collection.
5321953230
*/
5322053231
clone() {
53221-
return new HttpHeaders(this.rawHeaders());
53232+
const resultPreservingCasing = {};
53233+
for (const headerKey in this._headersMap) {
53234+
const header = this._headersMap[headerKey];
53235+
resultPreservingCasing[header.name] = header.value;
53236+
}
53237+
return new HttpHeaders(resultPreservingCasing);
5322253238
}
5322353239
}
5322453240

@@ -53255,7 +53271,7 @@ const Constants = {
5325553271
/**
5325653272
* The core-http version
5325753273
*/
53258-
coreHttpVersion: "2.2.1",
53274+
coreHttpVersion: "2.2.2",
5325953275
/**
5326053276
* Specifies HTTP.
5326153277
*/
@@ -55568,7 +55584,7 @@ class FetchHttpClient {
5556855584
}
5556955585
let downloadStreamDone = Promise.resolve();
5557055586
if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
55571-
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
55587+
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
5557255588
}
5557355589
Promise.all([uploadStreamDone, downloadStreamDone])
5557455590
.then(() => {
@@ -55586,11 +55602,14 @@ class FetchHttpClient {
5558655602
function isReadableStream(body) {
5558755603
return body && typeof body.pipe === "function";
5558855604
}
55589-
function isStreamComplete(stream) {
55605+
function isStreamComplete(stream, aborter) {
5559055606
return new Promise((resolve) => {
55591-
stream.on("close", resolve);
55592-
stream.on("end", resolve);
55593-
stream.on("error", resolve);
55607+
stream.once("close", () => {
55608+
aborter === null || aborter === void 0 ? void 0 : aborter.abort();
55609+
resolve();
55610+
});
55611+
stream.once("end", resolve);
55612+
stream.once("error", resolve);
5559455613
});
5559555614
}
5559655615
function parseHeaders(headers) {

dist/setup/index.js

+45-26
Original file line numberDiff line numberDiff line change
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
10411041
});
10421042
}
10431043
exports.createTempDirectory = createTempDirectory;
1044-
function getArchiveFileSizeIsBytes(filePath) {
1044+
function getArchiveFileSizeInBytes(filePath) {
10451045
return fs.statSync(filePath).size;
10461046
}
1047-
exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
1047+
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
10481048
function resolvePaths(patterns) {
10491049
var e_1, _a;
10501050
var _b;
@@ -4552,7 +4552,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
45524552
function uploadFile(httpClient, cacheId, archivePath, options) {
45534553
return __awaiter(this, void 0, void 0, function* () {
45544554
// Upload Chunks
4555-
const fileSize = fs.statSync(archivePath).size;
4555+
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
45564556
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
45574557
const fd = fs.openSync(archivePath, 'r');
45584558
const uploadOptions = options_1.getUploadOptions(options);
@@ -4602,7 +4602,7 @@ function saveCache(cacheId, archivePath, options) {
46024602
yield uploadFile(httpClient, cacheId, archivePath, options);
46034603
// Commit Cache
46044604
core.debug('Commiting cache');
4605-
const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
4605+
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
46064606
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
46074607
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
46084608
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -7551,7 +7551,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
75517551
const contentLengthHeader = downloadResponse.message.headers['content-length'];
75527552
if (contentLengthHeader) {
75537553
const expectedLength = parseInt(contentLengthHeader);
7554-
const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
7554+
const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
75557555
if (actualLength !== expectedLength) {
75567556
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
75577557
}
@@ -36873,7 +36873,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
3687336873
});
3687436874

3687536875
const INTERNALS$2 = Symbol('Request internals');
36876-
const URL = whatwgUrl.URL;
36876+
const URL = Url.URL || whatwgUrl.URL;
3687736877

3687836878
// fix an issue where "format", "parse" aren't a named export for node <10
3687936879
const parse_url = Url.parse;
@@ -47198,7 +47198,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
4719847198
if (core.isDebug()) {
4719947199
yield tar_1.listTar(archivePath, compressionMethod);
4720047200
}
47201-
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
47201+
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
4720247202
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
4720347203
yield tar_1.extractTar(archivePath, compressionMethod);
4720447204
core.info('Cache restored successfully');
@@ -47243,18 +47243,29 @@ function saveCache(paths, key, options) {
4724347243
const archiveFolder = yield utils.createTempDirectory();
4724447244
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
4724547245
core.debug(`Archive Path: ${archivePath}`);
47246-
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
47247-
if (core.isDebug()) {
47248-
yield tar_1.listTar(archivePath, compressionMethod);
47249-
}
47250-
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
47251-
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
47252-
core.debug(`File Size: ${archiveFileSize}`);
47253-
if (archiveFileSize > fileSizeLimit) {
47254-
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
47255-
}
47256-
core.debug(`Saving Cache (ID: ${cacheId})`);
47257-
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
47246+
try {
47247+
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
47248+
if (core.isDebug()) {
47249+
yield tar_1.listTar(archivePath, compressionMethod);
47250+
}
47251+
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
47252+
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
47253+
core.debug(`File Size: ${archiveFileSize}`);
47254+
if (archiveFileSize > fileSizeLimit) {
47255+
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
47256+
}
47257+
core.debug(`Saving Cache (ID: ${cacheId})`);
47258+
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
47259+
}
47260+
finally {
47261+
// Try to delete the archive to save space
47262+
try {
47263+
yield utils.unlinkFile(archivePath);
47264+
}
47265+
catch (error) {
47266+
core.debug(`Failed to delete archive: ${error}`);
47267+
}
47268+
}
4725847269
return cacheId;
4725947270
});
4726047271
}
@@ -59947,7 +59958,12 @@ class HttpHeaders {
5994759958
* Create a deep clone/copy of this HttpHeaders collection.
5994859959
*/
5994959960
clone() {
59950-
return new HttpHeaders(this.rawHeaders());
59961+
const resultPreservingCasing = {};
59962+
for (const headerKey in this._headersMap) {
59963+
const header = this._headersMap[headerKey];
59964+
resultPreservingCasing[header.name] = header.value;
59965+
}
59966+
return new HttpHeaders(resultPreservingCasing);
5995159967
}
5995259968
}
5995359969

@@ -59984,7 +60000,7 @@ const Constants = {
5998460000
/**
5998560001
* The core-http version
5998660002
*/
59987-
coreHttpVersion: "2.2.1",
60003+
coreHttpVersion: "2.2.2",
5998860004
/**
5998960005
* Specifies HTTP.
5999060006
*/
@@ -62297,7 +62313,7 @@ class FetchHttpClient {
6229762313
}
6229862314
let downloadStreamDone = Promise.resolve();
6229962315
if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
62300-
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
62316+
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
6230162317
}
6230262318
Promise.all([uploadStreamDone, downloadStreamDone])
6230362319
.then(() => {
@@ -62315,11 +62331,14 @@ class FetchHttpClient {
6231562331
function isReadableStream(body) {
6231662332
return body && typeof body.pipe === "function";
6231762333
}
62318-
function isStreamComplete(stream) {
62334+
function isStreamComplete(stream, aborter) {
6231962335
return new Promise((resolve) => {
62320-
stream.on("close", resolve);
62321-
stream.on("end", resolve);
62322-
stream.on("error", resolve);
62336+
stream.once("close", () => {
62337+
aborter === null || aborter === void 0 ? void 0 : aborter.abort();
62338+
resolve();
62339+
});
62340+
stream.once("end", resolve);
62341+
stream.once("error", resolve);
6232362342
});
6232462343
}
6232562344
function parseHeaders(headers) {

package-lock.json

+9-9
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
"author": "GitHub",
2424
"license": "MIT",
2525
"dependencies": {
26-
"@actions/cache": "^1.0.7",
26+
"@actions/cache": "^1.0.8",
2727
"@actions/core": "^1.2.3",
2828
"@actions/exec": "^1.1.0",
2929
"@actions/glob": "^0.2.0",

0 commit comments

Comments (0)