mirror of https://github.com/actions/cache.git

Switch cache action to use the cache node package

parent 16a133d9a7
commit 7f9517a009
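This commit swaps the action's hand-rolled cache pipeline (the cacheHttpClient, tar, and temp-directory helpers mocked throughout the old tests) for the @actions/cache node package, so the updated tests below stub a single restoreCache/saveCache surface instead. A minimal sketch of that package API as the new tests exercise it; the paths and keys here are illustrative:

import * as cache from "@actions/cache";

async function example(): Promise<void> {
    const paths = ["node_modules"];
    const key = "node-test";
    const restoreKeys = ["node-"];

    // Resolves to the matched key (primary or restore key),
    // or undefined when no cache is found.
    const cacheKey = await cache.restoreCache(paths, key, restoreKeys);
    console.log(`restored from: ${cacheKey}`);

    // Validates keys and paths, archives, and uploads; throws
    // ValidationError or ReserveCacheError on failure.
    const cacheId = await cache.saveCache(paths, key);
    console.log(`saved cache id: ${cacheId}`);
}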
__tests__/actionUtils.test.ts
@@ -1,98 +1,65 @@
| import * as core from "@actions/core"; | ||||
| import * as io from "@actions/io"; | ||||
| import { promises as fs } from "fs"; | ||||
| import * as os from "os"; | ||||
| import * as path from "path"; | ||||
| 
 | ||||
| import { Events, Outputs, RefKey, State } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
| 
 | ||||
| import uuid = require("uuid"); | ||||
| 
 | ||||
| jest.mock("@actions/core"); | ||||
| jest.mock("os"); | ||||
| 
 | ||||
| function getTempDir(): string { | ||||
|     return path.join(__dirname, "_temp", "actionUtils"); | ||||
| } | ||||
| 
 | ||||
| afterEach(() => { | ||||
|     delete process.env[Events.Key]; | ||||
|     delete process.env[RefKey]; | ||||
| }); | ||||
| 
 | ||||
| afterAll(async () => { | ||||
|     delete process.env["GITHUB_WORKSPACE"]; | ||||
|     await io.rmRF(getTempDir()); | ||||
| }); | ||||
| 
 | ||||
| test("getArchiveFileSize returns file size", () => { | ||||
|     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt"); | ||||
| 
 | ||||
|     const size = actionUtils.getArchiveFileSize(filePath); | ||||
| 
 | ||||
|     expect(size).toBe(11); | ||||
| }); | ||||
| 
 | ||||
| test("isExactKeyMatch with undefined cache entry returns false", () => { | ||||
| test("isExactKeyMatch with undefined cache key returns false", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry = undefined; | ||||
|     const cacheKey = undefined; | ||||
| 
 | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); | ||||
| }); | ||||
| 
 | ||||
| test("isExactKeyMatch with empty cache entry returns false", () => { | ||||
| test("isExactKeyMatch with empty cache key returns false", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = {}; | ||||
|     const cacheKey = ""; | ||||
| 
 | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); | ||||
| }); | ||||
| 
 | ||||
| test("isExactKeyMatch with different keys returns false", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-" | ||||
|     }; | ||||
|     const cacheKey = "linux-"; | ||||
| 
 | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); | ||||
| }); | ||||
| 
 | ||||
| test("isExactKeyMatch with different key accents returns false", () => { | ||||
|     const key = "linux-áccent"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-accent" | ||||
|     }; | ||||
|     const cacheKey = "linux-accent"; | ||||
| 
 | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); | ||||
| }); | ||||
| 
 | ||||
| test("isExactKeyMatch with same key returns true", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-rust" | ||||
|     }; | ||||
|     const cacheKey = "linux-rust"; | ||||
| 
 | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true); | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true); | ||||
| }); | ||||
| 
 | ||||
| test("isExactKeyMatch with same key and different casing returns true", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "LINUX-RUST" | ||||
|     }; | ||||
|     const cacheKey = "LINUX-RUST"; | ||||
| 
 | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true); | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true); | ||||
| }); | ||||
| 
 | ||||
| test("setOutputAndState with undefined entry to set cache-hit output", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry = undefined; | ||||
|     const cacheKey = undefined; | ||||
| 
 | ||||
|     const setOutputMock = jest.spyOn(core, "setOutput"); | ||||
|     const saveStateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     actionUtils.setOutputAndState(key, cacheEntry); | ||||
|     actionUtils.setOutputAndState(key, cacheKey); | ||||
| 
 | ||||
|     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false"); | ||||
|     expect(setOutputMock).toHaveBeenCalledTimes(1); | ||||
@@ -102,43 +69,33 @@ test("setOutputAndState with undefined entry to set cache-hit output", () => {
| 
 | ||||
| test("setOutputAndState with exact match to set cache-hit output and state", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-rust" | ||||
|     }; | ||||
|     const cacheKey = "linux-rust"; | ||||
| 
 | ||||
|     const setOutputMock = jest.spyOn(core, "setOutput"); | ||||
|     const saveStateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     actionUtils.setOutputAndState(key, cacheEntry); | ||||
|     actionUtils.setOutputAndState(key, cacheKey); | ||||
| 
 | ||||
|     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true"); | ||||
|     expect(setOutputMock).toHaveBeenCalledTimes(1); | ||||
| 
 | ||||
|     expect(saveStateMock).toHaveBeenCalledWith( | ||||
|         State.CacheResult, | ||||
|         JSON.stringify(cacheEntry) | ||||
|     ); | ||||
|     expect(saveStateMock).toHaveBeenCalledWith(State.CacheResult, cacheKey); | ||||
|     expect(saveStateMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
| 
 | ||||
| test("setOutputAndState with no exact match to set cache-hit output and state", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43" | ||||
|     }; | ||||
|     const cacheKey = "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
| 
 | ||||
|     const setOutputMock = jest.spyOn(core, "setOutput"); | ||||
|     const saveStateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     actionUtils.setOutputAndState(key, cacheEntry); | ||||
|     actionUtils.setOutputAndState(key, cacheKey); | ||||
| 
 | ||||
|     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false"); | ||||
|     expect(setOutputMock).toHaveBeenCalledTimes(1); | ||||
| 
 | ||||
|     expect(saveStateMock).toHaveBeenCalledWith( | ||||
|         State.CacheResult, | ||||
|         JSON.stringify(cacheEntry) | ||||
|     ); | ||||
|     expect(saveStateMock).toHaveBeenCalledWith(State.CacheResult, cacheKey); | ||||
|     expect(saveStateMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
| 
 | ||||
@@ -157,20 +114,16 @@ test("getCacheState with no state returns undefined", () => {
| }); | ||||
| 
 | ||||
| test("getCacheState with valid state", () => { | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const cacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
| 
 | ||||
|     const getStateMock = jest.spyOn(core, "getState"); | ||||
|     getStateMock.mockImplementation(() => { | ||||
|         return JSON.stringify(cacheEntry); | ||||
|         return cacheKey; | ||||
|     }); | ||||
| 
 | ||||
|     const state = actionUtils.getCacheState(); | ||||
| 
 | ||||
|     expect(state).toEqual(cacheEntry); | ||||
|     expect(state).toEqual(cacheKey); | ||||
| 
 | ||||
|     expect(getStateMock).toHaveBeenCalledWith(State.CacheResult); | ||||
|     expect(getStateMock).toHaveBeenCalledTimes(1); | ||||
@@ -195,137 +148,6 @@ test("isValidEvent returns false for event that does not have a branch or tag",
|     expect(isValidEvent).toBe(false); | ||||
| }); | ||||
| 
 | ||||
| test("resolvePaths with no ~ in path", async () => { | ||||
|     const filePath = ".cache"; | ||||
| 
 | ||||
|     // Create the following layout:
 | ||||
|     //   cwd
 | ||||
|     //   cwd/.cache
 | ||||
|     //   cwd/.cache/file.txt
 | ||||
| 
 | ||||
|     const root = path.join(getTempDir(), "no-tilde"); | ||||
|     // tarball entries will be relative to workspace
 | ||||
|     process.env["GITHUB_WORKSPACE"] = root; | ||||
| 
 | ||||
|     await fs.mkdir(root, { recursive: true }); | ||||
|     const cache = path.join(root, ".cache"); | ||||
|     await fs.mkdir(cache, { recursive: true }); | ||||
|     await fs.writeFile(path.join(cache, "file.txt"), "cached"); | ||||
| 
 | ||||
|     const originalCwd = process.cwd(); | ||||
| 
 | ||||
|     try { | ||||
|         process.chdir(root); | ||||
| 
 | ||||
|         const resolvedPath = await actionUtils.resolvePaths([filePath]); | ||||
| 
 | ||||
|         const expectedPath = [filePath]; | ||||
|         expect(resolvedPath).toStrictEqual(expectedPath); | ||||
|     } finally { | ||||
|         process.chdir(originalCwd); | ||||
|     } | ||||
| }); | ||||
| 
 | ||||
| test("resolvePaths with ~ in path", async () => { | ||||
|     const cacheDir = uuid(); | ||||
|     const filePath = `~/${cacheDir}`; | ||||
|     // Create the following layout:
 | ||||
|     //   ~/uuid
 | ||||
|     //   ~/uuid/file.txt
 | ||||
| 
 | ||||
|     const homedir = jest.requireActual("os").homedir(); | ||||
|     const homedirMock = jest.spyOn(os, "homedir"); | ||||
|     homedirMock.mockImplementation(() => { | ||||
|         return homedir; | ||||
|     }); | ||||
| 
 | ||||
|     const target = path.join(homedir, cacheDir); | ||||
|     await fs.mkdir(target, { recursive: true }); | ||||
|     await fs.writeFile(path.join(target, "file.txt"), "cached"); | ||||
| 
 | ||||
|     const root = getTempDir(); | ||||
|     process.env["GITHUB_WORKSPACE"] = root; | ||||
| 
 | ||||
|     try { | ||||
|         const resolvedPath = await actionUtils.resolvePaths([filePath]); | ||||
| 
 | ||||
|         const expectedPath = [path.relative(root, target)]; | ||||
|         expect(resolvedPath).toStrictEqual(expectedPath); | ||||
|     } finally { | ||||
|         await io.rmRF(target); | ||||
|     } | ||||
| }); | ||||
| 
 | ||||
| test("resolvePaths with home not found", async () => { | ||||
|     const filePath = "~/.cache/yarn"; | ||||
|     const homedirMock = jest.spyOn(os, "homedir"); | ||||
|     homedirMock.mockImplementation(() => { | ||||
|         return ""; | ||||
|     }); | ||||
| 
 | ||||
|     await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow( | ||||
|         "Unable to determine HOME directory" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("resolvePaths inclusion pattern returns found", async () => { | ||||
|     const pattern = "*.ts"; | ||||
|     // Create the following layout:
 | ||||
|     //   inclusion-patterns
 | ||||
|     //   inclusion-patterns/miss.txt
 | ||||
|     //   inclusion-patterns/test.ts
 | ||||
| 
 | ||||
|     const root = path.join(getTempDir(), "inclusion-patterns"); | ||||
|     // tarball entries will be relative to workspace
 | ||||
|     process.env["GITHUB_WORKSPACE"] = root; | ||||
| 
 | ||||
|     await fs.mkdir(root, { recursive: true }); | ||||
|     await fs.writeFile(path.join(root, "miss.txt"), "no match"); | ||||
|     await fs.writeFile(path.join(root, "test.ts"), "match"); | ||||
| 
 | ||||
|     const originalCwd = process.cwd(); | ||||
| 
 | ||||
|     try { | ||||
|         process.chdir(root); | ||||
| 
 | ||||
|         const resolvedPath = await actionUtils.resolvePaths([pattern]); | ||||
| 
 | ||||
|         const expectedPath = ["test.ts"]; | ||||
|         expect(resolvedPath).toStrictEqual(expectedPath); | ||||
|     } finally { | ||||
|         process.chdir(originalCwd); | ||||
|     } | ||||
| }); | ||||
| 
 | ||||
| test("resolvePaths exclusion pattern returns not found", async () => { | ||||
|     const patterns = ["*.ts", "!test.ts"]; | ||||
|     // Create the following layout:
 | ||||
|     //   exclusion-patterns
 | ||||
|     //   exclusion-patterns/miss.txt
 | ||||
|     //   exclusion-patterns/test.ts
 | ||||
| 
 | ||||
|     const root = path.join(getTempDir(), "exclusion-patterns"); | ||||
|     // tarball entries will be relative to workspace
 | ||||
|     process.env["GITHUB_WORKSPACE"] = root; | ||||
| 
 | ||||
|     await fs.mkdir(root, { recursive: true }); | ||||
|     await fs.writeFile(path.join(root, "miss.txt"), "no match"); | ||||
|     await fs.writeFile(path.join(root, "test.ts"), "no match"); | ||||
| 
 | ||||
|     const originalCwd = process.cwd(); | ||||
| 
 | ||||
|     try { | ||||
|         process.chdir(root); | ||||
| 
 | ||||
|         const resolvedPath = await actionUtils.resolvePaths(patterns); | ||||
| 
 | ||||
|         const expectedPath = []; | ||||
|         expect(resolvedPath).toStrictEqual(expectedPath); | ||||
|     } finally { | ||||
|         process.chdir(originalCwd); | ||||
|     } | ||||
| }); | ||||
| 
 | ||||
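The resolvePaths tests above are deleted here because that logic moves into @actions/cache. They pinned down tilde expansion, glob matching, and workspace-relative output. A rough reconstruction of such a helper, assuming @actions/glob (which expands "~" itself and raises "Unable to determine HOME directory" when it cannot); this is a sketch inferred from the tests, not the verbatim source:

import * as glob from "@actions/glob";
import * as path from "path";

export async function resolvePaths(patterns: string[]): Promise<string[]> {
    // Tarball entries are created relative to the workspace, so results are too.
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    const globber = await glob.create(patterns.join("\n"), {
        implicitDescendants: false
    });
    const files = await globber.glob();
    return files.map(file => path.relative(workspace, file));
}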
| test("isValidEvent returns true for event that has a ref", () => { | ||||
|     const event = Events.Push; | ||||
|     process.env[Events.Key] = event; | ||||
@@ -335,16 +157,3 @@ test("isValidEvent returns true for event that has a ref", () => {
| 
 | ||||
|     expect(isValidEvent).toBe(true); | ||||
| }); | ||||
| 
 | ||||
| test("unlinkFile unlinks file", async () => { | ||||
|     const testDirectory = await fs.mkdtemp("unlinkFileTest"); | ||||
|     const testFile = path.join(testDirectory, "test.txt"); | ||||
|     await fs.writeFile(testFile, "hello world"); | ||||
| 
 | ||||
|     await actionUtils.unlinkFile(testFile); | ||||
| 
 | ||||
|     // This should throw as testFile should not exist
 | ||||
|     await expect(fs.stat(testFile)).rejects.toThrow(); | ||||
| 
 | ||||
|     await fs.rmdir(testDirectory); | ||||
| }); | ||||
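As the rewritten assertions above show, isExactKeyMatch now takes the restored key as a plain string rather than an ArtifactCacheEntry. An implementation consistent with those tests (equal ignoring case, but not ignoring accents) can lean on localeCompare; a sketch, not necessarily the shipped code:

export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
    // "LINUX-RUST" matches "linux-rust"; "linux-accent" does not match "linux-áccent".
    return !!(
        cacheKey &&
        cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0
    );
}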
__tests__/cacheHttpClient.test.ts
@@ -1,177 +0,0 @@
| import { getCacheVersion, retry } from "../src/cacheHttpClient"; | ||||
| import { CompressionMethod, Inputs } from "../src/constants"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
| 
 | ||||
| afterEach(() => { | ||||
|     testUtils.clearInputs(); | ||||
| }); | ||||
| 
 | ||||
| test("getCacheVersion with path input and compression method undefined returns version", async () => { | ||||
|     testUtils.setInput(Inputs.Path, "node_modules"); | ||||
| 
 | ||||
|     const result = getCacheVersion(); | ||||
| 
 | ||||
|     expect(result).toEqual( | ||||
|         "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("getCacheVersion with zstd compression returns version", async () => { | ||||
|     testUtils.setInput(Inputs.Path, "node_modules"); | ||||
|     const result = getCacheVersion(CompressionMethod.Zstd); | ||||
| 
 | ||||
|     expect(result).toEqual( | ||||
|         "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("getCacheVersion with gzip compression does not change vesion", async () => { | ||||
|     testUtils.setInput(Inputs.Path, "node_modules"); | ||||
|     const result = getCacheVersion(CompressionMethod.Gzip); | ||||
| 
 | ||||
|     expect(result).toEqual( | ||||
|         "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("getCacheVersion with no input throws", async () => { | ||||
|     expect(() => getCacheVersion()).toThrow(); | ||||
| }); | ||||
| 
 | ||||
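The deleted getCacheVersion tests above fix its observable behavior: the version is a SHA-256 hex digest over the path input, zstd compression changes the digest, and gzip (the default) leaves it unchanged. A sketch with that shape; the exact component list and separator are assumptions, so the digests asserted above come from the real implementation, not from this one:

import * as core from "@actions/core";
import * as crypto from "crypto";

import { CompressionMethod, Inputs } from "./constants";

export function getCacheVersion(compressionMethod?: CompressionMethod): string {
    // core.getInput throws when the path input is missing, matching
    // "getCacheVersion with no input throws" above.
    const components = [core.getInput(Inputs.Path, { required: true })];
    // Only zstd changes the archive format; gzip hashes the same as undefined.
    if (compressionMethod === CompressionMethod.Zstd) {
        components.push(compressionMethod);
    }
    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}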
| interface TestResponse { | ||||
|     statusCode: number; | ||||
|     result: string | null; | ||||
| } | ||||
| 
 | ||||
| function handleResponse( | ||||
|     response: TestResponse | undefined | ||||
| ): Promise<TestResponse> { | ||||
|     if (!response) { | ||||
|         fail("Retry method called too many times"); | ||||
|     } | ||||
| 
 | ||||
|     if (response.statusCode === 999) { | ||||
|         throw Error("Test Error"); | ||||
|     } else { | ||||
|         return Promise.resolve(response); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| async function testRetryExpectingResult( | ||||
|     responses: Array<TestResponse>, | ||||
|     expectedResult: string | null | ||||
| ): Promise<void> { | ||||
|     responses = responses.reverse(); // Reverse responses since we pop from end
 | ||||
| 
 | ||||
|     const actualResult = await retry( | ||||
|         "test", | ||||
|         () => handleResponse(responses.pop()), | ||||
|         (response: TestResponse) => response.statusCode | ||||
|     ); | ||||
| 
 | ||||
|     expect(actualResult.result).toEqual(expectedResult); | ||||
| } | ||||
| 
 | ||||
| async function testRetryExpectingError( | ||||
|     responses: Array<TestResponse> | ||||
| ): Promise<void> { | ||||
|     responses = responses.reverse(); // Reverse responses since we pop from end
 | ||||
| 
 | ||||
|     expect( | ||||
|         retry( | ||||
|             "test", | ||||
|             () => handleResponse(responses.pop()), | ||||
|             (response: TestResponse) => response.statusCode | ||||
|         ) | ||||
|     ).rejects.toBeInstanceOf(Error); | ||||
| } | ||||
| 
 | ||||
| test("retry works on successful response", async () => { | ||||
|     await testRetryExpectingResult( | ||||
|         [ | ||||
|             { | ||||
|                 statusCode: 200, | ||||
|                 result: "Ok" | ||||
|             } | ||||
|         ], | ||||
|         "Ok" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("retry works after retryable status code", async () => { | ||||
|     await testRetryExpectingResult( | ||||
|         [ | ||||
|             { | ||||
|                 statusCode: 503, | ||||
|                 result: null | ||||
|             }, | ||||
|             { | ||||
|                 statusCode: 200, | ||||
|                 result: "Ok" | ||||
|             } | ||||
|         ], | ||||
|         "Ok" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("retry fails after exhausting retries", async () => { | ||||
|     await testRetryExpectingError([ | ||||
|         { | ||||
|             statusCode: 503, | ||||
|             result: null | ||||
|         }, | ||||
|         { | ||||
|             statusCode: 503, | ||||
|             result: null | ||||
|         }, | ||||
|         { | ||||
|             statusCode: 200, | ||||
|             result: "Ok" | ||||
|         } | ||||
|     ]); | ||||
| }); | ||||
| 
 | ||||
| test("retry fails after non-retryable status code", async () => { | ||||
|     await testRetryExpectingError([ | ||||
|         { | ||||
|             statusCode: 500, | ||||
|             result: null | ||||
|         }, | ||||
|         { | ||||
|             statusCode: 200, | ||||
|             result: "Ok" | ||||
|         } | ||||
|     ]); | ||||
| }); | ||||
| 
 | ||||
| test("retry works after error", async () => { | ||||
|     await testRetryExpectingResult( | ||||
|         [ | ||||
|             { | ||||
|                 statusCode: 999, | ||||
|                 result: null | ||||
|             }, | ||||
|             { | ||||
|                 statusCode: 200, | ||||
|                 result: "Ok" | ||||
|             } | ||||
|         ], | ||||
|         "Ok" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("retry returns after client error", async () => { | ||||
|     await testRetryExpectingResult( | ||||
|         [ | ||||
|             { | ||||
|                 statusCode: 400, | ||||
|                 result: null | ||||
|             }, | ||||
|             { | ||||
|                 statusCode: 200, | ||||
|                 result: "Ok" | ||||
|             } | ||||
|         ], | ||||
|         null | ||||
|     ); | ||||
| }); | ||||
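The retry helper deleted above is specified entirely by these tests: responses below 500 are returned as-is (so a 400 is handed back, not retried), 503 and thrown errors are retried, any other 5xx aborts immediately, and the exhaustion test implies a cap of two attempts. A sketch with exactly those semantics; the real helper likely treated more status codes as retryable:

export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let errorMessage = "";
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
            const response = await method();
            const statusCode = getStatusCode(response);
            // Success and client errors (e.g. 400) are returned to the caller.
            if (statusCode === undefined || statusCode < 500) {
                return response;
            }
            errorMessage = `${name} responded with status ${statusCode}`;
            // Of the server errors, only 503 is retried in the tests above.
            if (statusCode !== 503) {
                break;
            }
        } catch (error) {
            // Thrown errors (the statusCode 999 fixture) count as retryable.
            errorMessage = (error as Error).message;
        }
    }
    throw Error(`${name} failed: ${errorMessage}`);
}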
__tests__/restore.test.ts
@@ -1,22 +1,11 @@
| import * as cache from "@actions/cache"; | ||||
| import * as core from "@actions/core"; | ||||
| import * as path from "path"; | ||||
| 
 | ||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||
| import { | ||||
|     CacheFilename, | ||||
|     CompressionMethod, | ||||
|     Events, | ||||
|     Inputs, | ||||
|     RefKey | ||||
| } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import { Events, Inputs, RefKey } from "../src/constants"; | ||||
| import run from "../src/restore"; | ||||
| import * as tar from "../src/tar"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
| 
 | ||||
| jest.mock("../src/cacheHttpClient"); | ||||
| jest.mock("../src/tar"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
| 
 | ||||
| beforeAll(() => { | ||||
@@ -31,11 +20,6 @@ beforeAll(() => {
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.isValidEvent(); | ||||
|     }); | ||||
| 
 | ||||
|     jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => { | ||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.getCacheFileName(cm); | ||||
|     }); | ||||
| }); | ||||
| 
 | ||||
| beforeEach(() => { | ||||
@@ -64,7 +48,9 @@ test("restore with invalid event outputs warning", async () => {
| 
 | ||||
| test("restore with no path should fail", async () => { | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const restoreCacheMock = jest.spyOn(cache, "restoreCache"); | ||||
|     await run(); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(0); | ||||
|     // this input isn't necessary for restore b/c tarball contains entries relative to workspace
 | ||||
|     expect(failedMock).not.toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: path" | ||||
@@ -74,71 +60,89 @@ test("restore with no path should fail", async () => {
| test("restore with no key", async () => { | ||||
|     testUtils.setInput(Inputs.Path, "node_modules"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const restoreCacheMock = jest.spyOn(cache, "restoreCache"); | ||||
|     await run(); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(0); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: key" | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("restore with too many keys should fail", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "node-test"; | ||||
|     const restoreKeys = [...Array(20).keys()].map(x => x.toString()); | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key, | ||||
|         restoreKeys | ||||
|     }); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const restoreCacheMock = jest.spyOn(cache, "restoreCache"); | ||||
|     await run(); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         `Key Validation Error: Keys are limited to a maximum of 10.` | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("restore with large key should fail", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "foo".repeat(512); // Over the 512 character limit
 | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key | ||||
|     }); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const restoreCacheMock = jest.spyOn(cache, "restoreCache"); | ||||
|     await run(); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         `Key Validation Error: ${key} cannot be larger than 512 characters.` | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("restore with invalid key should fail", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "comma,comma"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key | ||||
|     }); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const restoreCacheMock = jest.spyOn(cache, "restoreCache"); | ||||
|     await run(); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         `Key Validation Error: ${key} cannot contain commas.` | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("restore with no cache found", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "node-test"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key | ||||
|     }); | ||||
| 
 | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     clientMock.mockImplementation(() => { | ||||
|         return Promise.resolve(null); | ||||
|     const restoreCacheMock = jest | ||||
|         .spyOn(cache, "restoreCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(undefined); | ||||
|         }); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); | ||||
| 
 | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| 
 | ||||
@@ -148,25 +152,28 @@ test("restore with no cache found", async () => {
| }); | ||||
| 
 | ||||
| test("restore with server error should fail", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "node-test"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key | ||||
|     }); | ||||
| 
 | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     clientMock.mockImplementation(() => { | ||||
|     const restoreCacheMock = jest | ||||
|         .spyOn(cache, "restoreCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             throw new Error("HTTP Error Occurred"); | ||||
|         }); | ||||
| 
 | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); | ||||
| 
 | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
| 
 | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
@@ -179,10 +186,11 @@ test("restore with server error should fail", async () => {
| }); | ||||
| 
 | ||||
| test("restore with restore keys and no cache found", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "node-test"; | ||||
|     const restoreKey = "node-"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key, | ||||
|         restoreKeys: [restoreKey] | ||||
|     }); | ||||
@@ -190,14 +198,17 @@ test("restore with restore keys and no cache found", async () => {
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     clientMock.mockImplementation(() => { | ||||
|         return Promise.resolve(null); | ||||
|     const restoreCacheMock = jest | ||||
|         .spyOn(cache, "restoreCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(undefined); | ||||
|         }); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]); | ||||
| 
 | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| 
 | ||||
@@ -206,161 +217,43 @@ test("restore with restore keys and no cache found", async () => {
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("restore with gzip compressed cache found", async () => { | ||||
| test("restore with cache found for key", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "node-test"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key | ||||
|     }); | ||||
| 
 | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: key, | ||||
|         scope: "refs/heads/master", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     getCacheMock.mockImplementation(() => { | ||||
|         return Promise.resolve(cacheEntry); | ||||
|     }); | ||||
|     const tempPath = "/foo/bar"; | ||||
| 
 | ||||
|     const createTempDirectoryMock = jest.spyOn( | ||||
|         actionUtils, | ||||
|         "createTempDirectory" | ||||
|     ); | ||||
|     createTempDirectoryMock.mockImplementation(() => { | ||||
|         return Promise.resolve(tempPath); | ||||
|     }); | ||||
| 
 | ||||
|     const archivePath = path.join(tempPath, CacheFilename.Gzip); | ||||
|     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); | ||||
|     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); | ||||
| 
 | ||||
|     const fileSize = 142; | ||||
|     const getArchiveFileSizeMock = jest | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
| 
 | ||||
|     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||
|     const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
| 
 | ||||
|     const compression = CompressionMethod.Gzip; | ||||
|     const getCompressionMock = jest | ||||
|         .spyOn(actionUtils, "getCompressionMethod") | ||||
|         .mockReturnValue(Promise.resolve(compression)); | ||||
|     const restoreCacheMock = jest | ||||
|         .spyOn(cache, "restoreCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(key); | ||||
|         }); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); | ||||
| 
 | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key], { | ||||
|         compressionMethod: compression | ||||
|     }); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
| 
 | ||||
|     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression); | ||||
| 
 | ||||
|     expect(unlinkFileMock).toHaveBeenCalledTimes(1); | ||||
|     expect(unlinkFileMock).toHaveBeenCalledWith(archivePath); | ||||
| 
 | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||
| 
 | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|     expect(getCompressionMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
| 
 | ||||
| test("restore with a pull request event and zstd compressed cache found", async () => { | ||||
|     const key = "node-test"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key | ||||
|     }); | ||||
| 
 | ||||
|     process.env[Events.Key] = Events.PullRequest; | ||||
| 
 | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: key, | ||||
|         scope: "refs/heads/master", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     getCacheMock.mockImplementation(() => { | ||||
|         return Promise.resolve(cacheEntry); | ||||
|     }); | ||||
|     const tempPath = "/foo/bar"; | ||||
| 
 | ||||
|     const createTempDirectoryMock = jest.spyOn( | ||||
|         actionUtils, | ||||
|         "createTempDirectory" | ||||
|     ); | ||||
|     createTempDirectoryMock.mockImplementation(() => { | ||||
|         return Promise.resolve(tempPath); | ||||
|     }); | ||||
| 
 | ||||
|     const archivePath = path.join(tempPath, CacheFilename.Zstd); | ||||
|     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); | ||||
|     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); | ||||
| 
 | ||||
|     const fileSize = 62915000; | ||||
|     const getArchiveFileSizeMock = jest | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
| 
 | ||||
|     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|     const compression = CompressionMethod.Zstd; | ||||
|     const getCompressionMock = jest | ||||
|         .spyOn(actionUtils, "getCompressionMethod") | ||||
|         .mockReturnValue(Promise.resolve(compression)); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key], { | ||||
|         compressionMethod: compression | ||||
|     }); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); | ||||
| 
 | ||||
|     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression); | ||||
| 
 | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||
| 
 | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|     expect(getCompressionMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
| 
 | ||||
| test("restore with cache found for restore key", async () => { | ||||
|     const path = "node_modules"; | ||||
|     const key = "node-test"; | ||||
|     const restoreKey = "node-"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         path: path, | ||||
|         key, | ||||
|         restoreKeys: [restoreKey] | ||||
|     }); | ||||
@@ -368,60 +261,19 @@ test("restore with cache found for restore key", async () => {
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
| 
 | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: restoreKey, | ||||
|         scope: "refs/heads/master", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     getCacheMock.mockImplementation(() => { | ||||
|         return Promise.resolve(cacheEntry); | ||||
|     }); | ||||
|     const tempPath = "/foo/bar"; | ||||
| 
 | ||||
|     const createTempDirectoryMock = jest.spyOn( | ||||
|         actionUtils, | ||||
|         "createTempDirectory" | ||||
|     ); | ||||
|     createTempDirectoryMock.mockImplementation(() => { | ||||
|         return Promise.resolve(tempPath); | ||||
|     }); | ||||
| 
 | ||||
|     const archivePath = path.join(tempPath, CacheFilename.Zstd); | ||||
|     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); | ||||
|     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); | ||||
| 
 | ||||
|     const fileSize = 142; | ||||
|     const getArchiveFileSizeMock = jest | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
| 
 | ||||
|     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|     const compression = CompressionMethod.Zstd; | ||||
|     const getCompressionMock = jest | ||||
|         .spyOn(actionUtils, "getCompressionMethod") | ||||
|         .mockReturnValue(Promise.resolve(compression)); | ||||
|     const restoreCacheMock = jest | ||||
|         .spyOn(cache, "restoreCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(restoreKey); | ||||
|         }); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(restoreCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]); | ||||
| 
 | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], { | ||||
|         compressionMethod: compression | ||||
|     }); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); | ||||
| 
 | ||||
|     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression); | ||||
| 
 | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); | ||||
| 
 | ||||
@@ -429,5 +281,4 @@ test("restore with cache found for restore key", async () => {
|         `Cache restored from key: ${restoreKey}` | ||||
|     ); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|     expect(getCompressionMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
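Read together, the restore assertions above describe the new src/restore.ts flow: persist the primary key to state, delegate download and extraction to cache.restoreCache, and derive the cache-hit output from an exact key match. A hedged sketch of that flow; names beyond the mocked ones ("CACHE_KEY", setCacheState, setCacheHitOutput, isExactKeyMatch, logWarning) are assumptions, including the getInputAsArray helper:

import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Inputs, State } from "./constants";
import * as utils from "./utils/actionUtils";

async function run(): Promise<void> {
    try {
        const primaryKey = core.getInput(Inputs.Key, { required: true });
        core.saveState(State.CacheKey, primaryKey); // "CACHE_KEY" in the assertions

        const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
        const cachePaths = utils.getInputAsArray(Inputs.Path, { required: true });

        try {
            const cacheKey = await cache.restoreCache(cachePaths, primaryKey, restoreKeys);
            if (!cacheKey) {
                core.info(
                    `Cache not found for input keys: ${[primaryKey, ...restoreKeys].join(", ")}`
                );
                return;
            }

            utils.setCacheState(cacheKey);
            utils.setCacheHitOutput(utils.isExactKeyMatch(primaryKey, cacheKey));
            core.info(`Cache restored from key: ${cacheKey}`);
        } catch (error) {
            // Key validation errors fail the step; transient errors only warn.
            if ((error as Error).name === cache.ValidationError.name) {
                throw error;
            }
            utils.logWarning((error as Error).message);
            utils.setCacheHitOutput(false);
        }
    } catch (error) {
        core.setFailed((error as Error).message);
    }
}

export default run;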
__tests__/save.test.ts
@@ -1,23 +1,13 @@
| import * as cache from "@actions/cache"; | ||||
| import * as core from "@actions/core"; | ||||
| import * as path from "path"; | ||||
| 
 | ||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||
| import { | ||||
|     CacheFilename, | ||||
|     CompressionMethod, | ||||
|     Events, | ||||
|     Inputs, | ||||
|     RefKey | ||||
| } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import { Events, Inputs, RefKey } from "../src/constants"; | ||||
| import run from "../src/save"; | ||||
| import * as tar from "../src/tar"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
| 
 | ||||
| jest.mock("@actions/core"); | ||||
| jest.mock("../src/cacheHttpClient"); | ||||
| jest.mock("../src/tar"); | ||||
| jest.mock("@actions/cache"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
| 
 | ||||
| beforeAll(() => { | ||||
@@ -41,21 +31,6 @@ beforeAll(() => {
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.isValidEvent(); | ||||
|     }); | ||||
| 
 | ||||
|     jest.spyOn(actionUtils, "resolvePaths").mockImplementation( | ||||
|         async filePaths => { | ||||
|             return filePaths.map(x => path.resolve(x)); | ||||
|         } | ||||
|     ); | ||||
| 
 | ||||
|     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => { | ||||
|         return Promise.resolve("/foo/bar"); | ||||
|     }); | ||||
| 
 | ||||
|     jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => { | ||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.getCacheFileName(cm); | ||||
|     }); | ||||
| }); | ||||
| 
 | ||||
| beforeEach(() => { | ||||
@@ -86,25 +61,21 @@ test("save with no primary key in state outputs warning", async () => {
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
| 
 | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
| 
 | ||||
|     const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|             return savedCacheKey; | ||||
|         }) | ||||
|         // Cache Key State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return ""; | ||||
|         }); | ||||
|     const saveCacheMock = jest.spyOn(cache, "saveCache"); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(0); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         `Error retrieving key from state.` | ||||
|     ); | ||||
@@ -117,33 +88,25 @@ test("save with exact match returns early", async () => {
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
| 
 | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: primaryKey, | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const savedCacheKey = primaryKey; | ||||
| 
 | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|             return savedCacheKey; | ||||
|         }) | ||||
|         // Cache Key State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
| 
 | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
|     const saveCacheMock = jest.spyOn(cache, "saveCache"); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(0); | ||||
|     expect(infoMock).toHaveBeenCalledWith( | ||||
|         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` | ||||
|     ); | ||||
| 
 | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(0); | ||||
| 
 | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
| 
 | ||||
@@ -152,25 +115,22 @@ test("save with missing input outputs warning", async () => {
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
| 
 | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const savedCacheKey = "Linux-node-"; | ||||
| 
 | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|             return savedCacheKey; | ||||
|         }) | ||||
|         // Cache Key State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
|     const saveCacheMock = jest.spyOn(cache, "saveCache"); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(0); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: path" | ||||
|     ); | ||||
@@ -183,17 +143,12 @@ test("save with large cache outputs warning", async () => {
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
| 
 | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const savedCacheKey = "Linux-node-"; | ||||
| 
 | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|             return savedCacheKey; | ||||
|         }) | ||||
|         // Cache Key State
 | ||||
|         .mockImplementationOnce(() => { | ||||
@@ -201,36 +156,26 @@ test("save with large cache outputs warning", async () => {
|         }); | ||||
| 
 | ||||
|     const inputPath = "node_modules"; | ||||
|     const cachePaths = [path.resolve(inputPath)]; | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
| 
 | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
| 
 | ||||
|     const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
 | ||||
|     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { | ||||
|         return cacheSize; | ||||
|     const saveCacheMock = jest | ||||
|         .spyOn(cache, "saveCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             throw new Error( | ||||
|                 "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache." | ||||
|             ); | ||||
|         }); | ||||
|     const compression = CompressionMethod.Gzip; | ||||
|     const getCompressionMock = jest | ||||
|         .spyOn(actionUtils, "getCompressionMethod") | ||||
|         .mockReturnValue(Promise.resolve(compression)); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     const archiveFolder = "/foo/bar"; | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey); | ||||
| 
 | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(createTarMock).toHaveBeenCalledWith( | ||||
|         archiveFolder, | ||||
|         cachePaths, | ||||
|         compression | ||||
|     ); | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache." | ||||
|     ); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|     expect(getCompressionMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
| 
 | ||||
| test("save with reserve cache failure outputs warning", async () => { | ||||
@@ -239,17 +184,12 @@ test("save with reserve cache failure outputs warning", async () => {
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
| 
 | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const savedCacheKey = "Linux-node-"; | ||||
| 
 | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|             return savedCacheKey; | ||||
|         }) | ||||
|         // Cache Key State
 | ||||
|         .mockImplementationOnce(() => { | ||||
@@ -259,35 +199,26 @@ test("save with reserve cache failure outputs warning", async () => {
|     const inputPath = "node_modules"; | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
| 
 | ||||
|     const reserveCacheMock = jest | ||||
|         .spyOn(cacheHttpClient, "reserveCache") | ||||
|     const saveCacheMock = jest | ||||
|         .spyOn(cache, "saveCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(-1); | ||||
|             const actualCache = jest.requireActual("@actions/cache"); | ||||
|             const error = new actualCache.ReserveCacheError( | ||||
|                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||
|             ); | ||||
|             throw error; | ||||
|         }); | ||||
| 
 | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
|     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); | ||||
|     const compression = CompressionMethod.Zstd; | ||||
|     const getCompressionMock = jest | ||||
|         .spyOn(actionUtils, "getCompressionMethod") | ||||
|         .mockReturnValue(Promise.resolve(compression)); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, { | ||||
|         compressionMethod: compression | ||||
|     }); | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey); | ||||
| 
 | ||||
|     expect(infoMock).toHaveBeenCalledWith( | ||||
|         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||
|     ); | ||||
| 
 | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(0); | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(0); | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(0); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|     expect(getCompressionMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
| 
 | ||||
| test("save with server error outputs warning", async () => { | ||||
@@ -295,17 +226,12 @@ test("save with server error outputs warning", async () => {
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
| 
 | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const savedCacheKey = "Linux-node-"; | ||||
| 
 | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|             return savedCacheKey; | ||||
|         }) | ||||
|         // Cache Key State
 | ||||
|         .mockImplementationOnce(() => { | ||||
@@ -313,70 +239,35 @@ test("save with server error outputs warning", async () => {
|         }); | ||||
| 
 | ||||
|     const inputPath = "node_modules"; | ||||
|     const cachePaths = [path.resolve(inputPath)]; | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
| 
 | ||||
|     const cacheId = 4; | ||||
|     const reserveCacheMock = jest | ||||
|         .spyOn(cacheHttpClient, "reserveCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(cacheId); | ||||
|         }); | ||||
| 
 | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
| 
 | ||||
|     const saveCacheMock = jest | ||||
|         .spyOn(cacheHttpClient, "saveCache") | ||||
|         .spyOn(cache, "saveCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             throw new Error("HTTP Error Occurred"); | ||||
|         }); | ||||
|     const compression = CompressionMethod.Zstd; | ||||
|     const getCompressionMock = jest | ||||
|         .spyOn(actionUtils, "getCompressionMethod") | ||||
|         .mockReturnValue(Promise.resolve(compression)); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, { | ||||
|         compressionMethod: compression | ||||
|     }); | ||||
| 
 | ||||
|     const archiveFolder = "/foo/bar"; | ||||
|     const archiveFile = path.join(archiveFolder, CacheFilename.Zstd); | ||||
| 
 | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(createTarMock).toHaveBeenCalledWith( | ||||
|         archiveFolder, | ||||
|         cachePaths, | ||||
|         compression | ||||
|     ); | ||||
| 
 | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey); | ||||
| 
 | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); | ||||
| 
 | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|     expect(getCompressionMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
| 
 | ||||
| test("save with valid inputs uploads a cache", async () => { | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
| 
 | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const savedCacheKey = "Linux-node-"; | ||||
| 
 | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|             return savedCacheKey; | ||||
|         }) | ||||
|         // Cache Key State
 | ||||
|         .mockImplementationOnce(() => { | ||||
|  | @ -384,44 +275,19 @@ test("save with valid inputs uploads a cache", async () => { | |||
|         }); | ||||
| 
 | ||||
|     const inputPath = "node_modules"; | ||||
|     const cachePaths = [path.resolve(inputPath)]; | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
| 
 | ||||
|     const cacheId = 4; | ||||
|     const reserveCacheMock = jest | ||||
|         .spyOn(cacheHttpClient, "reserveCache") | ||||
|     const saveCacheMock = jest | ||||
|         .spyOn(cache, "saveCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(cacheId); | ||||
|         }); | ||||
| 
 | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
| 
 | ||||
|     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); | ||||
|     const compression = CompressionMethod.Zstd; | ||||
|     const getCompressionMock = jest | ||||
|         .spyOn(actionUtils, "getCompressionMethod") | ||||
|         .mockReturnValue(Promise.resolve(compression)); | ||||
| 
 | ||||
|     await run(); | ||||
| 
 | ||||
|     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, { | ||||
|         compressionMethod: compression | ||||
|     }); | ||||
| 
 | ||||
|     const archiveFolder = "/foo/bar"; | ||||
|     const archiveFile = path.join(archiveFolder, CacheFilename.Zstd); | ||||
| 
 | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(createTarMock).toHaveBeenCalledWith( | ||||
|         archiveFolder, | ||||
|         cachePaths, | ||||
|         compression | ||||
|     ); | ||||
| 
 | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey); | ||||
| 
 | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|     expect(getCompressionMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
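For orientation, this is the whole API surface the rewritten tests exercise: a minimal sketch of the @actions/cache calls, with the key and path literals invented for illustration.

import * as cache from "@actions/cache";

async function demo(): Promise<void> {
    // Illustrative values only; the action reads these from its inputs.
    const paths = ["node_modules"];
    const key = "Linux-node-deadbeef";

    // Save: reserving, tarring, and uploading all happen inside the package.
    await cache.saveCache(paths, key);

    // Restore: resolves to the matched key on a hit, undefined on a miss.
    const restoredKey = await cache.restoreCache(paths, key, ["Linux-node-"]);
    if (restoredKey) {
        console.log(`restored from ${restoredKey}`);
    }
}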
|  |  | |||
|  | @ -1,204 +0,0 @@ | |||
| import * as exec from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as path from "path"; | ||||
| 
 | ||||
| import { CacheFilename, CompressionMethod } from "../src/constants"; | ||||
| import * as tar from "../src/tar"; | ||||
| import * as utils from "../src/utils/actionUtils"; | ||||
| 
 | ||||
| import fs = require("fs"); | ||||
| 
 | ||||
| jest.mock("@actions/exec"); | ||||
| jest.mock("@actions/io"); | ||||
| 
 | ||||
| const IS_WINDOWS = process.platform === "win32"; | ||||
| 
 | ||||
| function getTempDir(): string { | ||||
|     return path.join(__dirname, "_temp", "tar"); | ||||
| } | ||||
| 
 | ||||
| beforeAll(async () => { | ||||
|     jest.spyOn(io, "which").mockImplementation(tool => { | ||||
|         return Promise.resolve(tool); | ||||
|     }); | ||||
| 
 | ||||
|     process.env["GITHUB_WORKSPACE"] = process.cwd(); | ||||
|     await jest.requireActual("@actions/io").rmRF(getTempDir()); | ||||
| }); | ||||
| 
 | ||||
| afterAll(async () => { | ||||
|     delete process.env["GITHUB_WORKSPACE"]; | ||||
|     await jest.requireActual("@actions/io").rmRF(getTempDir()); | ||||
| }); | ||||
| 
 | ||||
| test("zstd extract tar", async () => { | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
| 
 | ||||
|     const archivePath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\fakepath\\cache.tar` | ||||
|         : "cache.tar"; | ||||
|     const workspace = process.env["GITHUB_WORKSPACE"]; | ||||
| 
 | ||||
|     await tar.extractTar(archivePath, CompressionMethod.Zstd); | ||||
| 
 | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(workspace); | ||||
|     const tarPath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||
|         : "tar"; | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith( | ||||
|         `"${tarPath}"`, | ||||
|         [ | ||||
|             "--use-compress-program", | ||||
|             "zstd -d --long=30", | ||||
|             "-xf", | ||||
|             IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, | ||||
|             "-P", | ||||
|             "-C", | ||||
|             IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace | ||||
|         ], | ||||
|         { cwd: undefined } | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("gzip extract tar", async () => { | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const archivePath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\fakepath\\cache.tar` | ||||
|         : "cache.tar"; | ||||
|     const workspace = process.env["GITHUB_WORKSPACE"]; | ||||
| 
 | ||||
|     await tar.extractTar(archivePath, CompressionMethod.Gzip); | ||||
| 
 | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(workspace); | ||||
|     const tarPath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||
|         : "tar"; | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith( | ||||
|         `"${tarPath}"`, | ||||
|         [ | ||||
|             "-z", | ||||
|             "-xf", | ||||
|             IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, | ||||
|             "-P", | ||||
|             "-C", | ||||
|             IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace | ||||
|         ], | ||||
|         { cwd: undefined } | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("gzip extract GNU tar on windows", async () => { | ||||
|     if (IS_WINDOWS) { | ||||
|         jest.spyOn(fs, "existsSync").mockReturnValueOnce(false); | ||||
| 
 | ||||
|         const isGnuMock = jest | ||||
|             .spyOn(utils, "useGnuTar") | ||||
|             .mockReturnValue(Promise.resolve(true)); | ||||
|         const execMock = jest.spyOn(exec, "exec"); | ||||
|         const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`; | ||||
|         const workspace = process.env["GITHUB_WORKSPACE"]; | ||||
| 
 | ||||
|         await tar.extractTar(archivePath, CompressionMethod.Gzip); | ||||
| 
 | ||||
|         expect(isGnuMock).toHaveBeenCalledTimes(1); | ||||
|         expect(execMock).toHaveBeenCalledTimes(1); | ||||
|         expect(execMock).toHaveBeenCalledWith( | ||||
|             `"tar"`, | ||||
|             [ | ||||
|                 "-z", | ||||
|                 "-xf", | ||||
|                 archivePath.replace(/\\/g, "/"), | ||||
|                 "-P", | ||||
|                 "-C", | ||||
|                 workspace?.replace(/\\/g, "/"), | ||||
|                 "--force-local" | ||||
|             ], | ||||
|             { cwd: undefined } | ||||
|         ); | ||||
|     } | ||||
| }); | ||||
| 
 | ||||
| test("zstd create tar", async () => { | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
| 
 | ||||
|     const archiveFolder = getTempDir(); | ||||
|     const workspace = process.env["GITHUB_WORKSPACE"]; | ||||
|     const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`]; | ||||
| 
 | ||||
|     await fs.promises.mkdir(archiveFolder, { recursive: true }); | ||||
| 
 | ||||
|     await tar.createTar( | ||||
|         archiveFolder, | ||||
|         sourceDirectories, | ||||
|         CompressionMethod.Zstd | ||||
|     ); | ||||
| 
 | ||||
|     const tarPath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||
|         : "tar"; | ||||
| 
 | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith( | ||||
|         `"${tarPath}"`, | ||||
|         [ | ||||
|             "--use-compress-program", | ||||
|             "zstd -T0 --long=30", | ||||
|             "-cf", | ||||
|             IS_WINDOWS | ||||
|                 ? CacheFilename.Zstd.replace(/\\/g, "/") | ||||
|                 : CacheFilename.Zstd, | ||||
|             "-P", | ||||
|             "-C", | ||||
|             IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace, | ||||
|             "--files-from", | ||||
|             "manifest.txt" | ||||
|         ], | ||||
|         { | ||||
|             cwd: archiveFolder | ||||
|         } | ||||
|     ); | ||||
| }); | ||||
| 
 | ||||
| test("gzip create tar", async () => { | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
| 
 | ||||
|     const archiveFolder = getTempDir(); | ||||
|     const workspace = process.env["GITHUB_WORKSPACE"]; | ||||
|     const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`]; | ||||
| 
 | ||||
|     await fs.promises.mkdir(archiveFolder, { recursive: true }); | ||||
| 
 | ||||
|     await tar.createTar( | ||||
|         archiveFolder, | ||||
|         sourceDirectories, | ||||
|         CompressionMethod.Gzip | ||||
|     ); | ||||
| 
 | ||||
|     const tarPath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||
|         : "tar"; | ||||
| 
 | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith( | ||||
|         `"${tarPath}"`, | ||||
|         [ | ||||
|             "-z", | ||||
|             "-cf", | ||||
|             IS_WINDOWS | ||||
|                 ? CacheFilename.Gzip.replace(/\\/g, "/") | ||||
|                 : CacheFilename.Gzip, | ||||
|             "-P", | ||||
|             "-C", | ||||
|             IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace, | ||||
|             "--files-from", | ||||
|             "manifest.txt" | ||||
|         ], | ||||
|         { | ||||
|             cwd: archiveFolder | ||||
|         } | ||||
|     ); | ||||
| }); | ||||
										
											
File diff suppressed because it is too large
File diff suppressed because it is too large
|  | @ -4,6 +4,26 @@ | |||
							|  | @ -4,6 +4,26 @@ | |||
|   "lockfileVersion": 1, | ||||
|   "requires": true, | ||||
|   "dependencies": { | ||||
|     "@actions/cache": { | ||||
|       "version": "0.1.0", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-0.1.0.tgz", | ||||
|       "integrity": "sha512-mP4t+AdMqSgx7hQn9fp3b1xWD7lIAqKj2IQ2MCgiyB6ivIBeXxnAVupjjGpaTlCQCmnL0E/pO51QAM1uvd4PRg==", | ||||
|       "requires": { | ||||
|         "@actions/core": "^1.2.4", | ||||
|         "@actions/exec": "^1.0.1", | ||||
|         "@actions/glob": "^0.1.0", | ||||
|         "@actions/http-client": "^1.0.8", | ||||
|         "@actions/io": "^1.0.1", | ||||
|         "uuid": "^3.3.3" | ||||
|       }, | ||||
|       "dependencies": { | ||||
|         "@actions/core": { | ||||
|           "version": "1.2.4", | ||||
|           "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.4.tgz", | ||||
|           "integrity": "sha512-YJCEq8BE3CdN8+7HPZ/4DxJjk/OkZV2FFIf+DlZTC/4iBlzYCD5yjRR6eiOS5llO11zbRltIRuKAjMKaWTE6cg==" | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
|     "@actions/core": { | ||||
|       "version": "1.2.0", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.0.tgz", | ||||
|  | @ -913,15 +933,6 @@ | |||
|       "integrity": "sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==", | ||||
|       "dev": true | ||||
|     }, | ||||
|     "@types/uuid": { | ||||
|       "version": "3.4.5", | ||||
|       "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.5.tgz", | ||||
|       "integrity": "sha512-MNL15wC3EKyw1VLF+RoVO4hJJdk9t/Hlv3rt1OL65Qvuadm4BYo6g9ZJQqoq7X8NBFSsQXgAujWciovh2lpVjA==", | ||||
|       "dev": true, | ||||
|       "requires": { | ||||
|         "@types/node": "*" | ||||
|       } | ||||
|     }, | ||||
|     "@types/yargs": { | ||||
|       "version": "12.0.12", | ||||
|       "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-12.0.12.tgz", | ||||
|  |  | |||
|  | @ -25,16 +25,13 @@ | |||
|   "dependencies": { | ||||
|     "@actions/core": "^1.2.0", | ||||
|     "@actions/exec": "^1.0.1", | ||||
|     "@actions/glob": "^0.1.0", | ||||
|     "@actions/http-client": "^1.0.8", | ||||
|     "@actions/io": "^1.0.1", | ||||
|     "uuid": "^3.3.3" | ||||
|     "@actions/cache": "^0.1.0" | ||||
|   }, | ||||
|   "devDependencies": { | ||||
|     "@types/jest": "^24.0.13", | ||||
|     "@types/nock": "^11.1.0", | ||||
|     "@types/node": "^12.0.4", | ||||
|     "@types/uuid": "^3.4.5", | ||||
|     "@typescript-eslint/eslint-plugin": "^2.7.0", | ||||
|     "@typescript-eslint/parser": "^2.7.0", | ||||
|     "@zeit/ncc": "^0.20.5", | ||||
|  |  | |||
|  | @ -1,424 +0,0 @@ | |||
| import * as core from "@actions/core"; | ||||
| import { HttpClient, HttpCodes } from "@actions/http-client"; | ||||
| import { BearerCredentialHandler } from "@actions/http-client/auth"; | ||||
| import { | ||||
|     IHttpClientResponse, | ||||
|     IRequestOptions, | ||||
|     ITypedResponse | ||||
| } from "@actions/http-client/interfaces"; | ||||
| import * as crypto from "crypto"; | ||||
| import * as fs from "fs"; | ||||
| import * as stream from "stream"; | ||||
| import * as util from "util"; | ||||
| 
 | ||||
| import { CompressionMethod, Inputs, SocketTimeout } from "./constants"; | ||||
| import { | ||||
|     ArtifactCacheEntry, | ||||
|     CacheOptions, | ||||
|     CommitCacheRequest, | ||||
|     ReserveCacheRequest, | ||||
|     ReserveCacheResponse | ||||
| } from "./contracts"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
| 
 | ||||
| const versionSalt = "1.0"; | ||||
| 
 | ||||
| function isSuccessStatusCode(statusCode?: number): boolean { | ||||
|     if (!statusCode) { | ||||
|         return false; | ||||
|     } | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
| 
 | ||||
| function isServerErrorStatusCode(statusCode?: number): boolean { | ||||
|     if (!statusCode) { | ||||
|         return true; | ||||
|     } | ||||
|     return statusCode >= 500; | ||||
| } | ||||
| 
 | ||||
| function isRetryableStatusCode(statusCode?: number): boolean { | ||||
|     if (!statusCode) { | ||||
|         return false; | ||||
|     } | ||||
|     const retryableStatusCodes = [ | ||||
|         HttpCodes.BadGateway, | ||||
|         HttpCodes.ServiceUnavailable, | ||||
|         HttpCodes.GatewayTimeout | ||||
|     ]; | ||||
|     return retryableStatusCodes.includes(statusCode); | ||||
| } | ||||
| 
 | ||||
| function getCacheApiUrl(resource: string): string { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL
 | ||||
|     const baseUrl: string = ( | ||||
|         process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "" | ||||
|     ).replace("pipelines", "artifactcache"); | ||||
|     if (!baseUrl) { | ||||
|         throw new Error( | ||||
|             "Cache Service Url not found, unable to restore cache." | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     const url = `${baseUrl}_apis/artifactcache/${resource}`; | ||||
|     core.debug(`Resource Url: ${url}`); | ||||
|     return url; | ||||
| } | ||||
| 
 | ||||
| function createAcceptHeader(type: string, apiVersion: string): string { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
| 
 | ||||
| function getRequestOptions(): IRequestOptions { | ||||
|     const requestOptions: IRequestOptions = { | ||||
|         headers: { | ||||
|             Accept: createAcceptHeader("application/json", "6.0-preview.1") | ||||
|         } | ||||
|     }; | ||||
| 
 | ||||
|     return requestOptions; | ||||
| } | ||||
| 
 | ||||
| function createHttpClient(): HttpClient { | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||
| 
 | ||||
|     return new HttpClient( | ||||
|         "actions/cache", | ||||
|         [bearerCredentialHandler], | ||||
|         getRequestOptions() | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| export function getCacheVersion(compressionMethod?: CompressionMethod): string { | ||||
|     const components = [core.getInput(Inputs.Path, { required: true })].concat( | ||||
|         compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : [] | ||||
|     ); | ||||
| 
 | ||||
|     // Add salt to cache version to support breaking changes in cache entry
 | ||||
|     components.push(versionSalt); | ||||
| 
 | ||||
|     return crypto | ||||
|         .createHash("sha256") | ||||
|         .update(components.join("|")) | ||||
|         .digest("hex"); | ||||
| } | ||||
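To make the hashing concrete, a standalone sketch of the same computation with assumed inputs: with zstd available the joined string is "node_modules|zstd|1.0", under gzip just "node_modules|1.0", so changing the compression method also changes the cache version.

import * as crypto from "crypto";

// Inlined getCacheVersion with hypothetical inputs: path input, method, salt.
const components = ["node_modules", "zstd", "1.0"];
const version = crypto
    .createHash("sha256")
    .update(components.join("|"))
    .digest("hex");
console.log(version); // hex digest sent to the cache service as "version"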
| 
 | ||||
| export async function retry<T>( | ||||
|     name: string, | ||||
|     method: () => Promise<T>, | ||||
|     getStatusCode: (response: T) => number | undefined, | ||||
|     maxAttempts = 2 | ||||
| ): Promise<T> { | ||||
|     let response: T | undefined = undefined; | ||||
|     let statusCode: number | undefined = undefined; | ||||
|     let isRetryable = false; | ||||
|     let errorMessage = ""; | ||||
|     let attempt = 1; | ||||
| 
 | ||||
|     while (attempt <= maxAttempts) { | ||||
|         try { | ||||
|             response = await method(); | ||||
|             statusCode = getStatusCode(response); | ||||
| 
 | ||||
|             if (!isServerErrorStatusCode(statusCode)) { | ||||
|                 return response; | ||||
|             } | ||||
| 
 | ||||
|             isRetryable = isRetryableStatusCode(statusCode); | ||||
|             errorMessage = `Cache service responded with ${statusCode}`; | ||||
|         } catch (error) { | ||||
|             isRetryable = true; | ||||
|             errorMessage = error.message; | ||||
|         } | ||||
| 
 | ||||
|         core.debug( | ||||
|             `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}` | ||||
|         ); | ||||
| 
 | ||||
|         if (!isRetryable) { | ||||
|             core.debug(`${name} - Error is not retryable`); | ||||
|             break; | ||||
|         } | ||||
| 
 | ||||
|         attempt++; | ||||
|     } | ||||
| 
 | ||||
|     throw Error(`${name} failed: ${errorMessage}`); | ||||
| } | ||||
| 
 | ||||
| export async function retryTypedResponse<T>( | ||||
|     name: string, | ||||
|     method: () => Promise<ITypedResponse<T>>, | ||||
|     maxAttempts = 2 | ||||
| ): Promise<ITypedResponse<T>> { | ||||
|     return await retry( | ||||
|         name, | ||||
|         method, | ||||
|         (response: ITypedResponse<T>) => response.statusCode, | ||||
|         maxAttempts | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| export async function retryHttpClientResponse<T>( | ||||
|     name: string, | ||||
|     method: () => Promise<IHttpClientResponse>, | ||||
|     maxAttempts = 2 | ||||
| ): Promise<IHttpClientResponse> { | ||||
|     return await retry( | ||||
|         name, | ||||
|         method, | ||||
|         (response: IHttpClientResponse) => response.message.statusCode, | ||||
|         maxAttempts | ||||
|     ); | ||||
| } | ||||
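A hedged usage sketch of the retry family above, reusing this file's own names (createHttpClient, getCacheApiUrl, ArtifactCacheEntry, ITypedResponse): the caller supplies a label for debug logs, the operation, and an accessor telling retry where the status code lives. Thrown errors and 502/503/504 responses get one more attempt by default; other 5xx responses fail immediately, and anything below 500 is returned as-is.

async function fetchEntryWithRetry(): Promise<ITypedResponse<ArtifactCacheEntry>> {
    const httpClient = createHttpClient();
    return await retry(
        "getCacheEntry",
        () => httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl("cache")),
        (res: ITypedResponse<ArtifactCacheEntry>) => res.statusCode
    );
}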
| 
 | ||||
| export async function getCacheEntry( | ||||
|     keys: string[], | ||||
|     options?: CacheOptions | ||||
| ): Promise<ArtifactCacheEntry | null> { | ||||
|     const httpClient = createHttpClient(); | ||||
|     const version = getCacheVersion(options?.compressionMethod); | ||||
|     const resource = `cache?keys=${encodeURIComponent( | ||||
|         keys.join(",") | ||||
|     )}&version=${version}`;
 | ||||
| 
 | ||||
|     const response = await retryTypedResponse("getCacheEntry", () => | ||||
|         httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource)) | ||||
|     ); | ||||
|     if (response.statusCode === 204) { | ||||
|         return null; | ||||
|     } | ||||
|     if (!isSuccessStatusCode(response.statusCode)) { | ||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|     } | ||||
| 
 | ||||
|     const cacheResult = response.result; | ||||
|     const cacheDownloadUrl = cacheResult?.archiveLocation; | ||||
|     if (!cacheDownloadUrl) { | ||||
|         throw new Error("Cache not found."); | ||||
|     } | ||||
|     core.setSecret(cacheDownloadUrl); | ||||
|     core.debug(`Cache Result:`); | ||||
|     core.debug(JSON.stringify(cacheResult)); | ||||
| 
 | ||||
|     return cacheResult; | ||||
| } | ||||
| 
 | ||||
| async function pipeResponseToStream( | ||||
|     response: IHttpClientResponse, | ||||
|     output: NodeJS.WritableStream | ||||
| ): Promise<void> { | ||||
|     const pipeline = util.promisify(stream.pipeline); | ||||
|     await pipeline(response.message, output); | ||||
| } | ||||
| 
 | ||||
| export async function downloadCache( | ||||
|     archiveLocation: string, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const stream = fs.createWriteStream(archivePath); | ||||
|     const httpClient = new HttpClient("actions/cache"); | ||||
|     const downloadResponse = await retryHttpClientResponse( | ||||
|         "downloadCache", | ||||
|         () => httpClient.get(archiveLocation) | ||||
|     ); | ||||
| 
 | ||||
|     // Abort download if no traffic received over the socket.
 | ||||
|     downloadResponse.message.socket.setTimeout(SocketTimeout, () => { | ||||
|         downloadResponse.message.destroy(); | ||||
|         core.debug( | ||||
|             `Aborting download, socket timed out after ${SocketTimeout} ms` | ||||
|         ); | ||||
|     }); | ||||
| 
 | ||||
|     await pipeResponseToStream(downloadResponse, stream); | ||||
| 
 | ||||
|     // Validate download size.
 | ||||
|     const contentLengthHeader = | ||||
|         downloadResponse.message.headers["content-length"]; | ||||
| 
 | ||||
|     if (contentLengthHeader) { | ||||
|         const expectedLength = parseInt(contentLengthHeader); | ||||
|         const actualLength = utils.getArchiveFileSize(archivePath); | ||||
| 
 | ||||
|         if (actualLength != expectedLength) { | ||||
|             throw new Error( | ||||
|                 `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}` | ||||
|             ); | ||||
|         } | ||||
|     } else { | ||||
|         core.debug("Unable to validate download, no Content-Length header"); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| // Reserve Cache
 | ||||
| export async function reserveCache( | ||||
|     key: string, | ||||
|     options?: CacheOptions | ||||
| ): Promise<number> { | ||||
|     const httpClient = createHttpClient(); | ||||
|     const version = getCacheVersion(options?.compressionMethod); | ||||
| 
 | ||||
|     const reserveCacheRequest: ReserveCacheRequest = { | ||||
|         key, | ||||
|         version | ||||
|     }; | ||||
|     const response = await retryTypedResponse("reserveCache", () => | ||||
|         httpClient.postJson<ReserveCacheResponse>( | ||||
|             getCacheApiUrl("caches"), | ||||
|             reserveCacheRequest | ||||
|         ) | ||||
|     ); | ||||
|     return response?.result?.cacheId ?? -1; | ||||
| } | ||||
| 
 | ||||
| function getContentRange(start: number, end: number): string { | ||||
|     // Format: `bytes start-end/filesize`
 | ||||
|     // start and end are inclusive
 | ||||
|     // filesize can be *
 | ||||
|     // For a 200 byte chunk starting at byte 0:
 | ||||
|     // Content-Range: bytes 0-199/*
 | ||||
|     return `bytes ${start}-${end}/*`; | ||||
| } | ||||
| 
 | ||||
| async function uploadChunk( | ||||
|     httpClient: HttpClient, | ||||
|     resourceUrl: string, | ||||
|     openStream: () => NodeJS.ReadableStream, | ||||
|     start: number, | ||||
|     end: number | ||||
| ): Promise<void> { | ||||
|     core.debug( | ||||
|         `Uploading chunk of size ${end - | ||||
|             start + | ||||
|             1} bytes at offset ${start} with content range: ${getContentRange( | ||||
|             start, | ||||
|             end | ||||
|         )}` | ||||
|     ); | ||||
|     const additionalHeaders = { | ||||
|         "Content-Type": "application/octet-stream", | ||||
|         "Content-Range": getContentRange(start, end) | ||||
|     }; | ||||
| 
 | ||||
|     await retryHttpClientResponse( | ||||
|         `uploadChunk (start: ${start}, end: ${end})`, | ||||
|         () => | ||||
|             httpClient.sendStream( | ||||
|                 "PATCH", | ||||
|                 resourceUrl, | ||||
|                 openStream(), | ||||
|                 additionalHeaders | ||||
|             ) | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| function parseEnvNumber(key: string): number | undefined { | ||||
|     const value = Number(process.env[key]); | ||||
|     if (Number.isNaN(value) || value < 0) { | ||||
|         return undefined; | ||||
|     } | ||||
|     return value; | ||||
| } | ||||
| 
 | ||||
| async function uploadFile( | ||||
|     httpClient: HttpClient, | ||||
|     cacheId: number, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     // Upload Chunks
 | ||||
|     const fileSize = fs.statSync(archivePath).size; | ||||
|     const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); | ||||
|     const fd = fs.openSync(archivePath, "r"); | ||||
| 
 | ||||
|     const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
 | ||||
|     const MAX_CHUNK_SIZE = | ||||
|         parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
 | ||||
|     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||
| 
 | ||||
|     const parallelUploads = [...new Array(concurrency).keys()]; | ||||
|     core.debug("Awaiting all uploads"); | ||||
|     let offset = 0; | ||||
| 
 | ||||
|     try { | ||||
|         await Promise.all( | ||||
|             parallelUploads.map(async () => { | ||||
|                 while (offset < fileSize) { | ||||
|                     const chunkSize = Math.min( | ||||
|                         fileSize - offset, | ||||
|                         MAX_CHUNK_SIZE | ||||
|                     ); | ||||
|                     const start = offset; | ||||
|                     const end = offset + chunkSize - 1; | ||||
|                     offset += MAX_CHUNK_SIZE; | ||||
| 
 | ||||
|                     await uploadChunk( | ||||
|                         httpClient, | ||||
|                         resourceUrl, | ||||
|                         () => | ||||
|                             fs | ||||
|                                 .createReadStream(archivePath, { | ||||
|                                     fd, | ||||
|                                     start, | ||||
|                                     end, | ||||
|                                     autoClose: false | ||||
|                                 }) | ||||
|                                 .on("error", error => { | ||||
|                                     throw new Error( | ||||
|                                         `Cache upload failed because file read failed with ${error.message}` | ||||
|                                     ); | ||||
|                                 }), | ||||
|                         start, | ||||
|                         end | ||||
|                     ); | ||||
|                 } | ||||
|             }) | ||||
|         ); | ||||
|     } finally { | ||||
|         fs.closeSync(fd); | ||||
|     } | ||||
|     return; | ||||
| } | ||||
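The chunk arithmetic is easiest to see with numbers. A self-contained sketch with an assumed 70 MiB archive and the default 32 MiB chunk size, printing the inclusive Content-Range each upload would claim; note the workers in uploadFile share one offset counter, so they pull chunks cooperatively rather than partitioning the file up front.

const fileSize = 70 * 1024 * 1024;       // assumed archive size
const MAX_CHUNK_SIZE = 32 * 1024 * 1024; // default chunk size

for (let offset = 0; offset < fileSize; offset += MAX_CHUNK_SIZE) {
    const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
    console.log(`bytes ${offset}-${end}/*`);
}
// bytes 0-33554431/*
// bytes 33554432-67108863/*
// bytes 67108864-73400319/*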
| 
 | ||||
| async function commitCache( | ||||
|     httpClient: HttpClient, | ||||
|     cacheId: number, | ||||
|     filesize: number | ||||
| ): Promise<ITypedResponse<null>> { | ||||
|     const commitCacheRequest: CommitCacheRequest = { size: filesize }; | ||||
|     return await retryTypedResponse("commitCache", () => | ||||
|         httpClient.postJson<null>( | ||||
|             getCacheApiUrl(`caches/${cacheId.toString()}`), | ||||
|             commitCacheRequest | ||||
|         ) | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| export async function saveCache( | ||||
|     cacheId: number, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const httpClient = createHttpClient(); | ||||
| 
 | ||||
|     core.debug("Upload cache"); | ||||
|     await uploadFile(httpClient, cacheId, archivePath); | ||||
| 
 | ||||
|     // Commit Cache
 | ||||
|     core.debug("Commiting cache"); | ||||
|     const cacheSize = utils.getArchiveFileSize(archivePath); | ||||
|     const commitCacheResponse = await commitCache( | ||||
|         httpClient, | ||||
|         cacheId, | ||||
|         cacheSize | ||||
|     ); | ||||
|     if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||
|         throw new Error( | ||||
|             `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     core.info("Cache saved successfully"); | ||||
| } | ||||
|  | @ -19,19 +19,4 @@ export enum Events { | |||
|     PullRequest = "pull_request" | ||||
| } | ||||
| 
 | ||||
| export enum CacheFilename { | ||||
|     Gzip = "cache.tgz", | ||||
|     Zstd = "cache.tzst" | ||||
| } | ||||
| 
 | ||||
| export enum CompressionMethod { | ||||
|     Gzip = "gzip", | ||||
|     Zstd = "zstd" | ||||
| } | ||||
| 
 | ||||
| // Socket timeout in milliseconds during download.  If no traffic is received
 | ||||
| // over the socket during this period, the socket is destroyed and the download
 | ||||
| // is aborted.
 | ||||
| export const SocketTimeout = 5000; | ||||
| 
 | ||||
| export const RefKey = "GITHUB_REF"; | ||||
|  |  | |||
|  | @ -1,25 +0,0 @@ | |||
| import { CompressionMethod } from "./constants"; | ||||
| 
 | ||||
| export interface ArtifactCacheEntry { | ||||
|     cacheKey?: string; | ||||
|     scope?: string; | ||||
|     creationTime?: string; | ||||
|     archiveLocation?: string; | ||||
| } | ||||
| 
 | ||||
| export interface CommitCacheRequest { | ||||
|     size: number; | ||||
| } | ||||
| 
 | ||||
| export interface ReserveCacheRequest { | ||||
|     key: string; | ||||
|     version?: string; | ||||
| } | ||||
| 
 | ||||
| export interface ReserveCacheResponse { | ||||
|     cacheId: number; | ||||
| } | ||||
| 
 | ||||
| export interface CacheOptions { | ||||
|     compressionMethod?: CompressionMethod; | ||||
| } | ||||
|  | @ -1,9 +1,7 @@ | |||
| import * as cache from "@actions/cache"; | ||||
| import * as core from "@actions/core"; | ||||
| import * as path from "path"; | ||||
| 
 | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { extractTar } from "./tar"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
| 
 | ||||
| async function run(): Promise<void> { | ||||
|  | @ -25,90 +23,43 @@ async function run(): Promise<void> { | |||
|             .getInput(Inputs.RestoreKeys) | ||||
|             .split("\n") | ||||
|             .filter(x => x !== ""); | ||||
|         const keys = [primaryKey, ...restoreKeys]; | ||||
| 
 | ||||
|         core.debug("Resolved Keys:"); | ||||
|         core.debug(JSON.stringify(keys)); | ||||
| 
 | ||||
|         if (keys.length > 10) { | ||||
|             core.setFailed( | ||||
|                 `Key Validation Error: Keys are limited to a maximum of 10.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|         for (const key of keys) { | ||||
|             if (key.length > 512) { | ||||
|                 core.setFailed( | ||||
|                     `Key Validation Error: ${key} cannot be larger than 512 characters.` | ||||
|                 ); | ||||
|                 return; | ||||
|             } | ||||
|             const regex = /^[^,]*$/; | ||||
|             if (!regex.test(key)) { | ||||
|                 core.setFailed( | ||||
|                     `Key Validation Error: ${key} cannot contain commas.` | ||||
|                 ); | ||||
|                 return; | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         const compressionMethod = await utils.getCompressionMethod(); | ||||
|         const cachePaths = core | ||||
|             .getInput(Inputs.Path, { required: true }) | ||||
|             .split("\n") | ||||
|             .filter(x => x !== ""); | ||||
| 
 | ||||
|         try { | ||||
|             const cacheEntry = await cacheHttpClient.getCacheEntry(keys, { | ||||
|                 compressionMethod: compressionMethod | ||||
|             }); | ||||
|             if (!cacheEntry?.archiveLocation) { | ||||
|                 core.info(`Cache not found for input keys: ${keys.join(", ")}`); | ||||
|             const cacheKey = await cache.restoreCache( | ||||
|                 cachePaths, | ||||
|                 primaryKey, | ||||
|                 restoreKeys | ||||
|             ); | ||||
|             if (!cacheKey) { | ||||
|                 core.info( | ||||
|                     `Cache not found for input keys: ${[ | ||||
|                         primaryKey, | ||||
|                         ...restoreKeys | ||||
|                     ].join(", ")}` | ||||
|                 ); | ||||
|                 return; | ||||
|             } | ||||
| 
 | ||||
|             const archivePath = path.join( | ||||
|                 await utils.createTempDirectory(), | ||||
|                 utils.getCacheFileName(compressionMethod) | ||||
|             ); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
| 
 | ||||
|             // Store the cache result
 | ||||
|             utils.setCacheState(cacheEntry); | ||||
|             utils.setCacheState(cacheKey); | ||||
| 
 | ||||
|             try { | ||||
|                 // Download the cache from the cache entry
 | ||||
|                 await cacheHttpClient.downloadCache( | ||||
|                     cacheEntry.archiveLocation, | ||||
|                     archivePath | ||||
|                 ); | ||||
| 
 | ||||
|                 const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|                 core.info( | ||||
|                     `Cache Size: ~${Math.round( | ||||
|                         archiveFileSize / (1024 * 1024) | ||||
|                     )} MB (${archiveFileSize} B)` | ||||
|                 ); | ||||
| 
 | ||||
|                 await extractTar(archivePath, compressionMethod); | ||||
|             } finally { | ||||
|                 // Try to delete the archive to save space
 | ||||
|                 try { | ||||
|                     await utils.unlinkFile(archivePath); | ||||
|                 } catch (error) { | ||||
|                     core.debug(`Failed to delete archive: ${error}`); | ||||
|                 } | ||||
|             } | ||||
| 
 | ||||
|             const isExactKeyMatch = utils.isExactKeyMatch( | ||||
|                 primaryKey, | ||||
|                 cacheEntry | ||||
|             ); | ||||
|             const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey); | ||||
|             utils.setCacheHitOutput(isExactKeyMatch); | ||||
| 
 | ||||
|             core.info( | ||||
|                 `Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}` | ||||
|             ); | ||||
|             core.info(`Cache restored from key: ${cacheKey}`); | ||||
|         } catch (error) { | ||||
|             if (error.name === cache.ValidationError.name) { | ||||
|                 throw error; | ||||
|             } else { | ||||
|                 utils.logWarning(error.message); | ||||
|                 utils.setCacheHitOutput(false); | ||||
|             } | ||||
|         } | ||||
|     } catch (error) { | ||||
|         core.setFailed(error.message); | ||||
|     } | ||||
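Condensed, the new restore path reduces to the fragment below (key literals are hypothetical; cache and utils as imported above). One behavioral detail worth spelling out: the cache-hit output is "true" only for a primary-key match, while a restore-key hit still records the key for save.ts but reports "false".

const restoredKey = await cache.restoreCache(
    ["node_modules"],    // paths input, assumed
    "Linux-node-abc123", // primary key, assumed
    ["Linux-node-"]      // restore keys, assumed
);
if (restoredKey) {
    utils.setCacheState(restoredKey); // lets save.ts skip exact-match saves
    utils.setCacheHitOutput(utils.isExactKeyMatch("Linux-node-abc123", restoredKey));
}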
|  |  | |||
							
								
								
									
59 src/save.ts
|  | @ -1,9 +1,7 @@ | |||
| import * as cache from "@actions/cache"; | ||||
| import * as core from "@actions/core"; | ||||
| import * as path from "path"; | ||||
| 
 | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { createTar } from "./tar"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
| 
 | ||||
| async function run(): Promise<void> { | ||||
|  | @ -33,53 +31,22 @@ async function run(): Promise<void> { | |||
|             return; | ||||
|         } | ||||
| 
 | ||||
|         const compressionMethod = await utils.getCompressionMethod(); | ||||
| 
 | ||||
|         core.debug("Reserving Cache"); | ||||
|         const cacheId = await cacheHttpClient.reserveCache(primaryKey, { | ||||
|             compressionMethod: compressionMethod | ||||
|         }); | ||||
|         if (cacheId == -1) { | ||||
|             core.info( | ||||
|                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|         core.debug(`Cache ID: ${cacheId}`); | ||||
|         const cachePaths = await utils.resolvePaths( | ||||
|             core | ||||
|         const cachePaths = core | ||||
|             .getInput(Inputs.Path, { required: true }) | ||||
|             .split("\n") | ||||
|                 .filter(x => x !== "") | ||||
|         ); | ||||
|             .filter(x => x !== ""); | ||||
| 
 | ||||
|         core.debug("Cache Paths:"); | ||||
|         core.debug(`${JSON.stringify(cachePaths)}`); | ||||
| 
 | ||||
|         const archiveFolder = await utils.createTempDirectory(); | ||||
|         const archivePath = path.join( | ||||
|             archiveFolder, | ||||
|             utils.getCacheFileName(compressionMethod) | ||||
|         ); | ||||
| 
 | ||||
|         core.debug(`Archive Path: ${archivePath}`); | ||||
| 
 | ||||
|         await createTar(archiveFolder, cachePaths, compressionMethod); | ||||
| 
 | ||||
|         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
 | ||||
|         const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|         core.debug(`File Size: ${archiveFileSize}`); | ||||
|         if (archiveFileSize > fileSizeLimit) { | ||||
|             utils.logWarning( | ||||
|                 `Cache size of ~${Math.round( | ||||
|                     archiveFileSize / (1024 * 1024) | ||||
|                 )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.` | ||||
|             ); | ||||
|             return; | ||||
|         try { | ||||
|             await cache.saveCache(cachePaths, primaryKey); | ||||
|         } catch (error) { | ||||
|             if (error.name === cache.ValidationError.name) { | ||||
|                 throw error; | ||||
|             } else if (error.name === cache.ReserveCacheError.name) { | ||||
|                 core.info(error.message); | ||||
|             } else { | ||||
|                 utils.logWarning(error.message); | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         core.debug(`Saving Cache (ID: ${cacheId})`); | ||||
|         await cacheHttpClient.saveCache(cacheId, archivePath); | ||||
|     } catch (error) { | ||||
|         utils.logWarning(error.message); | ||||
|     } | ||||
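One detail in the catch block above: errors are matched by error.name rather than instanceof. A plausible reason (an assumption, not stated in the commit) is that bundlers can duplicate a class across module copies, which defeats instanceof while the name string still agrees. A minimal local illustration with a stand-in class:

class ReserveCacheError extends Error {
    constructor(message: string) {
        super(message);
        this.name = "ReserveCacheError"; // presumably how @actions/cache tags its errors
    }
}

const err: Error = new ReserveCacheError("cache already being created");
// Comparing names works even if err came from a different bundled copy:
console.log(err.name === ReserveCacheError.name); // true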
|  |  | |||
							
								
								
									
87 src/tar.ts
|  | @ -1,87 +0,0 @@ | |||
| import { exec } from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import { existsSync, writeFileSync } from "fs"; | ||||
| import * as path from "path"; | ||||
| 
 | ||||
| import { CompressionMethod } from "./constants"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
| 
 | ||||
| async function getTarPath(args: string[]): Promise<string> { | ||||
|     // Explicitly use BSD Tar on Windows
 | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     if (IS_WINDOWS) { | ||||
|         const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||
|         if (existsSync(systemTar)) { | ||||
|             return systemTar; | ||||
|         } else if (await utils.useGnuTar()) { | ||||
|             args.push("--force-local"); | ||||
|         } | ||||
|     } | ||||
|     return await io.which("tar", true); | ||||
| } | ||||
| 
 | ||||
| async function execTar(args: string[], cwd?: string): Promise<void> { | ||||
|     try { | ||||
|         await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd }); | ||||
|     } catch (error) { | ||||
|         throw new Error(`Tar failed with error: ${error?.message}`); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| function getWorkingDirectory(): string { | ||||
|     return process.env["GITHUB_WORKSPACE"] ?? process.cwd(); | ||||
| } | ||||
| 
 | ||||
| export async function extractTar( | ||||
|     archivePath: string, | ||||
|     compressionMethod: CompressionMethod | ||||
| ): Promise<void> { | ||||
|     // Create directory to extract tar into
 | ||||
|     const workingDirectory = getWorkingDirectory(); | ||||
|     await io.mkdirP(workingDirectory); | ||||
|     // -d: Decompress.
 | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|     const args = [ | ||||
|         ...(compressionMethod == CompressionMethod.Zstd | ||||
|             ? ["--use-compress-program", "zstd -d --long=30"] | ||||
|             : ["-z"]), | ||||
|         "-xf", | ||||
|         archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "-P", | ||||
|         "-C", | ||||
|         workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") | ||||
|     ]; | ||||
|     await execTar(args); | ||||
| } | ||||
| 
 | ||||
| export async function createTar( | ||||
|     archiveFolder: string, | ||||
|     sourceDirectories: string[], | ||||
|     compressionMethod: CompressionMethod | ||||
| ): Promise<void> { | ||||
|     // Write source directories to manifest.txt to avoid command length limits
 | ||||
|     const manifestFilename = "manifest.txt"; | ||||
|     const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|     writeFileSync( | ||||
|         path.join(archiveFolder, manifestFilename), | ||||
|         sourceDirectories.join("\n") | ||||
|     ); | ||||
|     // -T#: Compress using # working threads. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|     const workingDirectory = getWorkingDirectory(); | ||||
|     const args = [ | ||||
|         ...(compressionMethod == CompressionMethod.Zstd | ||||
|             ? ["--use-compress-program", "zstd -T0 --long=30"] | ||||
|             : ["-z"]), | ||||
|         "-cf", | ||||
|         cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "-P", | ||||
|         "-C", | ||||
|         workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "--files-from", | ||||
|         manifestFilename | ||||
|     ]; | ||||
|     await execTar(args, archiveFolder); | ||||
| } | ||||
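For reference, a runnable sketch reconstructing the argument vector the zstd branch of createTar assembles, with a hypothetical workspace path; extractTar mirrors it with "zstd -d --long=30" and -xf.

// Assumed values; at runtime these come from GITHUB_WORKSPACE and the enums.
const workspace = "/home/runner/work/repo/repo";
const args = [
    "--use-compress-program", "zstd -T0 --long=30",
    "-cf", "cache.tzst",
    "-P",
    "-C", workspace,
    "--files-from", "manifest.txt"
];
console.log(`"tar" ${args.join(" ")}`);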
|  | @ -1,86 +1,35 @@ | |||
| import * as core from "@actions/core"; | ||||
| import * as exec from "@actions/exec"; | ||||
| import * as glob from "@actions/glob"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as fs from "fs"; | ||||
| import * as path from "path"; | ||||
| import * as util from "util"; | ||||
| import * as uuidV4 from "uuid/v4"; | ||||
| 
 | ||||
| import { | ||||
|     CacheFilename, | ||||
|     CompressionMethod, | ||||
|     Outputs, | ||||
|     RefKey, | ||||
|     State | ||||
| } from "../constants"; | ||||
| import { ArtifactCacheEntry } from "../contracts"; | ||||
| import { Outputs, RefKey, State } from "../constants"; | ||||
| 
 | ||||
| // From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
 | ||||
| export async function createTempDirectory(): Promise<string> { | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
| 
 | ||||
|     let tempDirectory: string = process.env["RUNNER_TEMP"] || ""; | ||||
| 
 | ||||
|     if (!tempDirectory) { | ||||
|         let baseLocation: string; | ||||
|         if (IS_WINDOWS) { | ||||
|             // On Windows use the USERPROFILE env variable
 | ||||
|             baseLocation = process.env["USERPROFILE"] || "C:\\"; | ||||
|         } else { | ||||
|             if (process.platform === "darwin") { | ||||
|                 baseLocation = "/Users"; | ||||
|             } else { | ||||
|                 baseLocation = "/home"; | ||||
|             } | ||||
|         } | ||||
|         tempDirectory = path.join(baseLocation, "actions", "temp"); | ||||
|     } | ||||
| 
 | ||||
|     const dest = path.join(tempDirectory, uuidV4.default()); | ||||
|     await io.mkdirP(dest); | ||||
|     return dest; | ||||
| } | ||||
| 
 | ||||
| export function getArchiveFileSize(path: string): number { | ||||
|     return fs.statSync(path).size; | ||||
| } | ||||
| 
 | ||||
| export function isExactKeyMatch( | ||||
|     key: string, | ||||
|     cacheResult?: ArtifactCacheEntry | ||||
| ): boolean { | ||||
| export function isExactKeyMatch(key: string, cacheKey?: string): boolean { | ||||
|     return !!( | ||||
|         cacheResult && | ||||
|         cacheResult.cacheKey && | ||||
|         cacheResult.cacheKey.localeCompare(key, undefined, { | ||||
|         cacheKey && | ||||
|         cacheKey.localeCompare(key, undefined, { | ||||
|             sensitivity: "accent" | ||||
|         }) === 0 | ||||
|     ); | ||||
| } | ||||
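The sensitivity: "accent" collator option is what makes the matcher case-insensitive but accent-sensitive; a quick self-contained illustration:

console.log(
    "win-node".localeCompare("Win-Node", undefined, { sensitivity: "accent" }) === 0
); // true: case differences are ignored
console.log(
    "café".localeCompare("cafe", undefined, { sensitivity: "accent" }) === 0
); // false: accent differences still count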
| 
 | ||||
| export function setCacheState(state: ArtifactCacheEntry): void { | ||||
|     core.saveState(State.CacheResult, JSON.stringify(state)); | ||||
| export function setCacheState(state: string): void { | ||||
|     core.saveState(State.CacheResult, state); | ||||
| } | ||||
| 
 | ||||
| export function setCacheHitOutput(isCacheHit: boolean): void { | ||||
|     core.setOutput(Outputs.CacheHit, isCacheHit.toString()); | ||||
| } | ||||
| 
 | ||||
| export function setOutputAndState( | ||||
|     key: string, | ||||
|     cacheResult?: ArtifactCacheEntry | ||||
| ): void { | ||||
|     setCacheHitOutput(isExactKeyMatch(key, cacheResult)); | ||||
| export function setOutputAndState(key: string, cacheKey?: string): void { | ||||
|     setCacheHitOutput(isExactKeyMatch(key, cacheKey)); | ||||
|     // Store the cache result if it exists
 | ||||
|     cacheResult && setCacheState(cacheResult); | ||||
|     cacheKey && setCacheState(cacheKey); | ||||
| } | ||||
| 
 | ||||
| export function getCacheState(): ArtifactCacheEntry | undefined { | ||||
|     const stateData = core.getState(State.CacheResult); | ||||
|     core.debug(`State: ${stateData}`); | ||||
|     if (stateData) { | ||||
|         return JSON.parse(stateData) as ArtifactCacheEntry; | ||||
| export function getCacheState(): string | undefined { | ||||
|     const cacheKey = core.getState(State.CacheResult); | ||||
|     if (cacheKey) { | ||||
|         core.debug(`Cache state/key: ${cacheKey}`); | ||||
|         return cacheKey; | ||||
|     } | ||||
| 
 | ||||
|     return undefined; | ||||
|  | @ -91,70 +40,8 @@ export function logWarning(message: string): void { | |||
|     core.info(`${warningPrefix}${message}`); | ||||
| } | ||||
| 
 | ||||
| export async function resolvePaths(patterns: string[]): Promise<string[]> { | ||||
|     const paths: string[] = []; | ||||
|     const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd(); | ||||
|     const globber = await glob.create(patterns.join("\n"), { | ||||
|         implicitDescendants: false | ||||
|     }); | ||||
| 
 | ||||
|     for await (const file of globber.globGenerator()) { | ||||
|         const relativeFile = path.relative(workspace, file); | ||||
|         core.debug(`Matched: ${relativeFile}`); | ||||
|         // Paths are made relative so the tar entries are all relative to the root of the workspace.
 | ||||
|         paths.push(`${relativeFile}`); | ||||
|     } | ||||
| 
 | ||||
|     return paths; | ||||
| } | ||||
| 
 | ||||
| // Cache token authorized for all events that are tied to a ref
 | ||||
| // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
 | ||||
| export function isValidEvent(): boolean { | ||||
|     return RefKey in process.env && Boolean(process.env[RefKey]); | ||||
| } | ||||
| 
 | ||||
| export function unlinkFile(path: fs.PathLike): Promise<void> { | ||||
|     return util.promisify(fs.unlink)(path); | ||||
| } | ||||
| 
 | ||||
| async function getVersion(app: string): Promise<string> { | ||||
|     core.debug(`Checking ${app} --version`); | ||||
|     let versionOutput = ""; | ||||
|     try { | ||||
|         await exec.exec(`${app} --version`, [], { | ||||
|             ignoreReturnCode: true, | ||||
|             silent: true, | ||||
|             listeners: { | ||||
|                 stdout: (data: Buffer): string => | ||||
|                     (versionOutput += data.toString()), | ||||
|                 stderr: (data: Buffer): string => | ||||
|                     (versionOutput += data.toString()) | ||||
|             } | ||||
|         }); | ||||
|     } catch (err) { | ||||
|         core.debug(err.message); | ||||
|     } | ||||
| 
 | ||||
|     versionOutput = versionOutput.trim(); | ||||
|     core.debug(versionOutput); | ||||
|     return versionOutput; | ||||
| } | ||||
| 
 | ||||
| export async function getCompressionMethod(): Promise<CompressionMethod> { | ||||
|     const versionOutput = await getVersion("zstd"); | ||||
|     return versionOutput.toLowerCase().includes("zstd command line interface") | ||||
|         ? CompressionMethod.Zstd | ||||
|         : CompressionMethod.Gzip; | ||||
| } | ||||
| 
 | ||||
| export function getCacheFileName(compressionMethod: CompressionMethod): string { | ||||
|     return compressionMethod == CompressionMethod.Zstd | ||||
|         ? CacheFilename.Zstd | ||||
|         : CacheFilename.Gzip; | ||||
| } | ||||
| 
 | ||||
| export async function useGnuTar(): Promise<boolean> { | ||||
|     const versionOutput = await getVersion("tar"); | ||||
|     return versionOutput.toLowerCase().includes("gnu tar"); | ||||
| } | ||||
|  |  | |||
Aiqiao Yan